QCamera3HWI.cpp revision bed747e59c1b82688b547e2ee01746a4685c15ea
1/* Copyright (c) 2012-2015, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define ATRACE_TAG ATRACE_TAG_CAMERA
31#define LOG_TAG "QCamera3HWI"
32//#define LOG_NDEBUG 0
33
34#define __STDC_LIMIT_MACROS
35#include <cutils/properties.h>
36#include <hardware/camera3.h>
37#include <camera/CameraMetadata.h>
38#include <stdio.h>
39#include <stdlib.h>
40#include <fcntl.h>
41#include <stdint.h>
42#include <utils/Log.h>
43#include <utils/Errors.h>
44#include <utils/Trace.h>
45#include <sync/sync.h>
46#include <gralloc_priv.h>
47#include "util/QCameraFlash.h"
48#include "QCamera3HWI.h"
49#include "QCamera3Mem.h"
50#include "QCamera3Channel.h"
51#include "QCamera3PostProc.h"
52#include "QCamera3VendorTags.h"
53
54using namespace android;
55
56namespace qcamera {
57
/* Convenience accessor for a buffer pointer held by a QCamera3 memory object. */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline pacing constants (frames); values are tuning choices for this HAL.
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0
#define CAM_MAX_SYNC_LATENCY 4

// Maximum representable sample value for common sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) dimensions, used to detect 4K video sessions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size on which EIS is supported.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-type stream count limits advertised by this HAL.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (4)
// Elements per metering/focus region tuple (rect + weight, per Android metadata).
#define REGIONS_TUPLE_COUNT    5

// Number of entries in a statically sized lookup table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Post-processing feature mask requested for HAL3 processed streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                                CAM_QCOM_FEATURE_CROP |\
                                                CAM_QCOM_FEATURE_ROTATION |\
                                                CAM_QCOM_FEATURE_SHARPNESS |\
                                                CAM_QCOM_FEATURE_SCALE |\
                                                CAM_QCOM_FEATURE_CAC )
#define TIMEOUT_NEVER -1

// Per-sensor capability and static-metadata caches, shared by all HAL instances.
// gCamLock guards cross-instance access; gCamHal3LogLevel is set via getLogLevel().
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
volatile uint32_t gCamHal3LogLevel = 1;
99
// Maps CDS mode name strings ("On"/"Off"/"Auto") to HAL CDS modes.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// ANDROID_CONTROL_EFFECT_MODE_* <-> HAL effect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
119
// ANDROID_CONTROL_AWB_MODE_* <-> HAL white-balance mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// ANDROID_CONTROL_SCENE_MODE_* <-> HAL scene mode translation table.
// Note STEADYPHOTO intentionally maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
154
// ANDROID_CONTROL_AF_MODE_* <-> HAL focus mode translation table.
// AF_MODE_OFF appears twice so that both the HAL OFF and FIXED focus modes
// translate back to the single Android OFF value.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// ANDROID_COLOR_CORRECTION_ABERRATION_MODE_* <-> HAL CAC mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// ANDROID_CONTROL_AE_ANTIBANDING_MODE_* <-> HAL antibanding mode table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
186
// Maps the Android AE mode to the flash mode the HAL should run with.
// Plain ON (no flash variants) maps to flash OFF; REDEYE is treated as AUTO.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// ANDROID_FLASH_MODE_* <-> HAL flash mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// ANDROID_STATISTICS_FACE_DETECT_MODE_* <-> HAL face detect mode table.
// Only OFF and FULL are mapped; SIMPLE has no entry here.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
211
// ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_* <-> HAL calibration table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// ANDROID_LENS_STATE_* <-> HAL AF lens state table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Flattened (width, height) JPEG thumbnail size pairs advertised to the
// framework; the leading {0, 0} entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};
237
// ANDROID_SENSOR_TEST_PATTERN_MODE_* <-> HAL sensor test pattern table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: while mapping from HAL to Android the
 * code traverses from lower to higher index, so for HAL values that are mapped to
 * multiple Android values, the first entry found wins.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
273
// Requested frame rate (fps) -> HAL high-frame-rate mode table.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
285
/* HAL3 device ops vtable handed to the framework through camera3_device_t::ops.
 * register_stream_buffers and get_metadata_vendor_tag_ops are left NULL:
 * they are unused by this HAL (deprecated in later device API versions —
 * NOTE(review): confirm against the camera3.h version this builds with). */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
297
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members, fills in the camera3_device_t exposed to the
 *              framework, and reads debug-related system properties.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : camera module callbacks, cached in mCallbacks
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mFirstRequest(false),
      mFirstConfiguration(true),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mPrevUrgentFrameNumber(0),
      mPrevFrameNumber(0),
      mNeedSensorRestart(false),
      mPprocBypass(false)
{
    getLogLevel();
    // Populate the device struct the framework talks to; this HAL advertises
    // device API version 3.3.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; clear the cache slots.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);

    // persist.camera.facedetect: -1 (default) leaves app control untouched;
    // 0..FULL forces that face detect mode regardless of the app setting.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.facedetect", prop, "-1");
    m_overrideAppFaceDetection = (int8_t)atoi(prop);
    if (m_overrideAppFaceDetection >= 0)
    {
        CDBG_FATAL_IF(m_overrideAppFaceDetection > ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
        CDBG("%s: Override face detection: %d", __func__, m_overrideAppFaceDetection);
    }

    memset(&mInputStreamSize, 0, sizeof(mInputStreamSize));
}
394
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Stops every channel
 *              before deleting any of them, sends a final "unconfigure" to
 *              the backend when streams were configured, then closes the
 *              camera and releases all cached request state.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    CDBG("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */


    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }

    /* Turn off video hint */
    updatePowerHint(m_bIsVideo, false);

    // All channels are stopped now; safe to delete them and free the
    // stream_info_t records they were attached to.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // Picture channel is owned elsewhere (not deleted here); just drop the ref.
    mPictureChannel = NULL;

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (mMetadataChannel) {
            mMetadataChannel->stop();
            delete mMetadataChannel;
            mMetadataChannel = NULL;
        }
        if(!mFirstConfiguration){
            //send the last unconfigure
            // An empty CAM_INTF_META_STREAM_INFO tells the backend that no
            // streams remain configured.
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                ALOGE("%s: set_parms failed for unconfigure", __func__);
            }
        }
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    // Release any remaining per-request bookkeeping (frees copied input
    // buffers via erasePendingRequest) and cached default request templates.
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    CDBG("%s: X", __func__);
}
500
501/*===========================================================================
502 * FUNCTION   : erasePendingRequest
503 *
504 * DESCRIPTION: function to erase a desired pending request after freeing any
505 *              allocated memory
506 *
507 * PARAMETERS :
508 *   @i       : iterator pointing to pending request to be erased
509 *
510 * RETURN     : iterator pointing to the next request
511 *==========================================================================*/
512QCamera3HardwareInterface::pendingRequestIterator
513        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
514{
515    if (i->input_buffer != NULL) {
516        free(i->input_buffer);
517        i->input_buffer = NULL;
518    }
519    return mPendingRequestsList.erase(i);
520}
521
/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr (the owning QCamera3HardwareInterface)
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
                                          mm_camera_event_t *evt,
                                          void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                ALOGE("%s: Fatal, camera daemon died", __func__);
                //close the camera backend
                if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
                        && obj->mCameraHandle->ops) {
                    obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
                } else {
                    ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
                            __func__);
                }
                // Report an unrecoverable device error to the framework
                // (frame_number 0 / NULL stream per CAMERA3_MSG_ERROR_DEVICE).
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
                notify_msg.message.error.error_stream = NULL;
                notify_msg.message.error.frame_number = 0;
                obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                // Daemon is ready for more requests: wake any thread blocked
                // in process_capture_request.
                CDBG("%s: HAL got request pull from Daemon", __func__);
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
                        evt->server_event_type);
                break;
        }
    } else {
        ALOGE("%s: NULL user_data/evt", __func__);
    }
}
577
578/*===========================================================================
579 * FUNCTION   : openCamera
580 *
581 * DESCRIPTION: open camera
582 *
583 * PARAMETERS :
584 *   @hw_device  : double ptr for camera device struct
585 *
586 * RETURN     : int32_t type of status
587 *              NO_ERROR  -- success
588 *              none-zero failure code
589 *==========================================================================*/
590int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
591{
592    int rc = 0;
593    if (mCameraOpened) {
594        *hw_device = NULL;
595        return PERMISSION_DENIED;
596    }
597
598    rc = openCamera();
599    if (rc == 0) {
600        *hw_device = &mCameraDevice.common;
601    } else
602        *hw_device = NULL;
603
604    return rc;
605}
606
607/*===========================================================================
608 * FUNCTION   : openCamera
609 *
610 * DESCRIPTION: open camera
611 *
612 * PARAMETERS : none
613 *
614 * RETURN     : int32_t type of status
615 *              NO_ERROR  -- success
616 *              none-zero failure code
617 *==========================================================================*/
618int QCamera3HardwareInterface::openCamera()
619{
620    int rc = 0;
621
622    ATRACE_CALL();
623    if (mCameraHandle) {
624        ALOGE("Failure: Camera already opened");
625        return ALREADY_EXISTS;
626    }
627
628    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
629    if (rc < 0) {
630        ALOGE("%s: Failed to reserve flash for camera id: %d",
631                __func__,
632                mCameraId);
633        return UNKNOWN_ERROR;
634    }
635
636    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
637    if (rc) {
638        ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
639        return rc;
640    }
641
642    mCameraOpened = true;
643
644    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
645            camEvtHandle, (void *)this);
646
647    if (rc < 0) {
648        ALOGE("%s: Error, failed to register event callback", __func__);
649        /* Not closing camera here since it is already handled in destructor */
650        return FAILED_TRANSACTION;
651    }
652    mFirstConfiguration = true;
653    return NO_ERROR;
654}
655
656/*===========================================================================
657 * FUNCTION   : closeCamera
658 *
659 * DESCRIPTION: close camera
660 *
661 * PARAMETERS : none
662 *
663 * RETURN     : int32_t type of status
664 *              NO_ERROR  -- success
665 *              none-zero failure code
666 *==========================================================================*/
667int QCamera3HardwareInterface::closeCamera()
668{
669    ATRACE_CALL();
670    int rc = NO_ERROR;
671
672    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
673    mCameraHandle = NULL;
674    mCameraOpened = false;
675
676    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
677        CDBG("%s: Failed to release flash for camera id: %d",
678                __func__,
679                mCameraId);
680    }
681
682    return rc;
683}
684
685/*===========================================================================
686 * FUNCTION   : initialize
687 *
688 * DESCRIPTION: Initialize frameworks callback functions
689 *
690 * PARAMETERS :
691 *   @callback_ops : callback function to frameworks
692 *
693 * RETURN     :
694 *
695 *==========================================================================*/
696int QCamera3HardwareInterface::initialize(
697        const struct camera3_callback_ops *callback_ops)
698{
699    ATRACE_CALL();
700    int rc;
701
702    pthread_mutex_lock(&mMutex);
703
704    rc = initParameters();
705    if (rc < 0) {
706        ALOGE("%s: initParamters failed %d", __func__, rc);
707       goto err1;
708    }
709    mCallbackOps = callback_ops;
710
711    pthread_mutex_unlock(&mMutex);
712    mCameraInitialized = true;
713    return 0;
714
715err1:
716    pthread_mutex_unlock(&mMutex);
717    return rc;
718}
719
720/*===========================================================================
721 * FUNCTION   : validateStreamDimensions
722 *
723 * DESCRIPTION: Check if the configuration requested are those advertised
724 *
725 * PARAMETERS :
726 *   @stream_list : streams to be configured
727 *
728 * RETURN     :
729 *
730 *==========================================================================*/
731int QCamera3HardwareInterface::validateStreamDimensions(
732        camera3_stream_configuration_t *streamList)
733{
734    int rc = NO_ERROR;
735    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
736    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
737    size_t count = 0;
738
739    camera3_stream_t *inputStream = NULL;
740    /*
741    * Loop through all streams to find input stream if it exists*
742    */
743    for (size_t i = 0; i< streamList->num_streams; i++) {
744        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
745            if (inputStream != NULL) {
746                ALOGE("%s: Error, Multiple input streams requested");
747                return -EINVAL;
748            }
749            inputStream = streamList->streams[i];
750        }
751    }
752    /*
753    * Loop through all streams requested in configuration
754    * Check if unsupported sizes have been requested on any of them
755    */
756    for (size_t j = 0; j < streamList->num_streams; j++) {
757        bool sizeFound = false;
758        size_t jpeg_sizes_cnt = 0;
759        camera3_stream_t *newStream = streamList->streams[j];
760
761        uint32_t rotatedHeight = newStream->height;
762        uint32_t rotatedWidth = newStream->width;
763        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
764                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
765            rotatedHeight = newStream->width;
766            rotatedWidth = newStream->height;
767        }
768
769        /*
770        * Sizes are different for each type of stream format check against
771        * appropriate table.
772        */
773        switch (newStream->format) {
774        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
775        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
776        case HAL_PIXEL_FORMAT_RAW10:
777            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
778            for (size_t i = 0; i < count; i++) {
779                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
780                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
781                    sizeFound = true;
782                    break;
783                }
784            }
785            break;
786        case HAL_PIXEL_FORMAT_BLOB:
787            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
788            /* Generate JPEG sizes table */
789            makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
790                    count,
791                    MAX_SIZES_CNT,
792                    available_processed_sizes);
793            jpeg_sizes_cnt = filterJpegSizes(
794                    available_jpeg_sizes,
795                    available_processed_sizes,
796                    count * 2,
797                    MAX_SIZES_CNT * 2,
798                    gCamCapability[mCameraId]->active_array_size,
799                    gCamCapability[mCameraId]->max_downscale_factor);
800
801            /* Verify set size against generated sizes table */
802            for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
803                if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
804                        ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
805                    sizeFound = true;
806                    break;
807                }
808            }
809            break;
810        case HAL_PIXEL_FORMAT_YCbCr_420_888:
811        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
812        default:
813            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
814                    || newStream->stream_type == CAMERA3_STREAM_INPUT
815                    || newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) {
816                if (((int32_t)rotatedWidth ==
817                                gCamCapability[mCameraId]->active_array_size.width) &&
818                                ((int32_t)rotatedHeight ==
819                                gCamCapability[mCameraId]->active_array_size.height)) {
820                    sizeFound = true;
821                    break;
822                }
823                /* We could potentially break here to enforce ZSL stream
824                 * set from frameworks always is full active array size
825                 * but it is not clear from the spc if framework will always
826                 * follow that, also we have logic to override to full array
827                 * size, so keeping the logic lenient at the moment
828                 */
829            }
830            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
831                    MAX_SIZES_CNT);
832            for (size_t i = 0; i < count; i++) {
833                if (((int32_t)rotatedWidth ==
834                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
835                            ((int32_t)rotatedHeight ==
836                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
837                    sizeFound = true;
838                    break;
839                }
840            }
841            break;
842        } /* End of switch(newStream->format) */
843
844        /* We error out even if a single stream has unsupported size set */
845        if (!sizeFound) {
846            ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
847                  "type:%d", __func__, rotatedWidth, rotatedHeight,
848                  newStream->format);
849            ALOGE("%s: Active array size is  %d x %d", __func__,
850                    gCamCapability[mCameraId]->active_array_size.width,
851                    gCamCapability[mCameraId]->active_array_size.height);
852            rc = -EINVAL;
853            break;
854        }
855    } /* End of for each stream */
856    return rc;
857}
858
859/*==============================================================================
860 * FUNCTION   : isSupportChannelNeeded
861 *
862 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
863 *
864 * PARAMETERS :
865 *   @stream_list : streams to be configured
866 *
 * RETURN     : Boolean true/false decision
868 *
869 *==========================================================================*/
870bool QCamera3HardwareInterface::isSupportChannelNeeded(camera3_stream_configuration_t *streamList,
871        cam_stream_size_info_t stream_config_info)
872{
873    uint32_t i;
874    bool bSuperSetPresent = false;
875    /* Check for conditions where PProc pipeline does not have any streams*/
876    for (i = 0; i < stream_config_info.num_streams; i++) {
877        if (stream_config_info.postprocess_mask[i] == CAM_QCOM_FEATURE_PP_SUPERSET) {
878            bSuperSetPresent = true;
879            break;
880        }
881    }
882
883    if (bSuperSetPresent == false )
884        return true;
885
886    /* Dummy stream needed if only raw or jpeg streams present */
887    for (i = 0;i < streamList->num_streams;i++) {
888        switch(streamList->streams[i]->format) {
889            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
890            case HAL_PIXEL_FORMAT_RAW10:
891            case HAL_PIXEL_FORMAT_RAW16:
892            case HAL_PIXEL_FORMAT_BLOB:
893                break;
894            default:
895                return false;
896        }
897    }
898    return true;
899}
900
901/*==============================================================================
902 * FUNCTION   : getSensorOutputSize
903 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
905 *
906 * PARAMETERS :
907 *   @sensor_dim : sensor output dimension (output)
908 *
909 * RETURN     : int32_t type of status
910 *              NO_ERROR  -- success
911 *              none-zero failure code
912 *
913 *==========================================================================*/
914int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
915{
916    int32_t rc = NO_ERROR;
917
918    cam_dimension_t max_dim = {0, 0};
919    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
920        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
921            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
922        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
923            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
924    }
925
926    clear_metadata_buffer(mParameters);
927
928    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
929            max_dim);
930    if (rc != NO_ERROR) {
931        ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
932        return rc;
933    }
934
935    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
936    if (rc != NO_ERROR) {
937        ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
938        return rc;
939    }
940
941    clear_metadata_buffer(mParameters);
942    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
943
944    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
945            mParameters);
946    if (rc != NO_ERROR) {
947        ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
948        return rc;
949    }
950
951    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
952    ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
953
954    return rc;
955}
956
957/*==============================================================================
958 * FUNCTION   : updatePowerHint
959 *
960 * DESCRIPTION: update power hint based on whether it's video mode or not.
961 *
962 * PARAMETERS :
963 *   @bWasVideo : whether video mode before the switch
964 *   @bIsVideo  : whether new mode is video or not.
965 *
966 * RETURN     : NULL
967 *
968 *==========================================================================*/
969void QCamera3HardwareInterface::updatePowerHint(bool bWasVideo, bool bIsVideo)
970{
971#ifdef HAS_MULTIMEDIA_HINTS
972    if (bWasVideo == bIsVideo)
973        return;
974
975    if (m_pPowerModule && m_pPowerModule->powerHint) {
976        if (bIsVideo)
977            m_pPowerModule->powerHint(m_pPowerModule,
978                    POWER_HINT_VIDEO_ENCODE, (void *)"state=1");
979        else
980            m_pPowerModule->powerHint(m_pPowerModule,
981                    POWER_HINT_VIDEO_ENCODE, (void *)"state=0");
982     }
983#endif
984}
985
986/*===========================================================================
987 * FUNCTION   : configureStreams
988 *
989 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
990 *              and output streams.
991 *
992 * PARAMETERS :
993 *   @stream_list : streams to be configured
994 *
995 * RETURN     :
996 *
997 *==========================================================================*/
998int QCamera3HardwareInterface::configureStreams(
999        camera3_stream_configuration_t *streamList)
1000{
1001    ATRACE_CALL();
1002    int rc = 0;
1003    bool bWasVideo = m_bIsVideo;
1004    uint32_t numBuffers = MAX_INFLIGHT_REQUESTS;
1005
1006    // Sanity check stream_list
1007    if (streamList == NULL) {
1008        ALOGE("%s: NULL stream configuration", __func__);
1009        return BAD_VALUE;
1010    }
1011    if (streamList->streams == NULL) {
1012        ALOGE("%s: NULL stream list", __func__);
1013        return BAD_VALUE;
1014    }
1015
1016    if (streamList->num_streams < 1) {
1017        ALOGE("%s: Bad number of streams requested: %d", __func__,
1018                streamList->num_streams);
1019        return BAD_VALUE;
1020    }
1021
1022    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1023        ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
1024                MAX_NUM_STREAMS, streamList->num_streams);
1025        return BAD_VALUE;
1026    }
1027
1028    mOpMode = streamList->operation_mode;
1029    CDBG("%s: mOpMode: %d", __func__, mOpMode);
1030
1031    /* first invalidate all the steams in the mStreamList
1032     * if they appear again, they will be validated */
1033    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1034            it != mStreamInfo.end(); it++) {
1035        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1036        channel->stop();
1037        (*it)->status = INVALID;
1038    }
1039
1040    if (mRawDumpChannel) {
1041        mRawDumpChannel->stop();
1042        delete mRawDumpChannel;
1043        mRawDumpChannel = NULL;
1044    }
1045
1046    if (mSupportChannel)
1047        mSupportChannel->stop();
1048
1049    if (mAnalysisChannel) {
1050        mAnalysisChannel->stop();
1051    }
1052    if (mMetadataChannel) {
1053        /* If content of mStreamInfo is not 0, there is metadata stream */
1054        mMetadataChannel->stop();
1055    }
1056
1057    pthread_mutex_lock(&mMutex);
1058
1059    mPprocBypass = false;
1060    /* Check whether we have video stream */
1061    m_bIs4KVideo = false;
1062    m_bIsVideo = false;
1063    m_bEisSupportedSize = false;
1064    bool isZsl = false;
1065    uint32_t videoWidth = 0U;
1066    uint32_t videoHeight = 0U;
1067    size_t rawStreamCnt = 0;
1068    size_t stallStreamCnt = 0;
1069    size_t processedStreamCnt = 0;
1070    // Number of streams on ISP encoder path
1071    size_t numStreamsOnEncoder = 0;
1072    size_t numYuv888OnEncoder = 0;
1073    bool bYuv888OverrideJpeg = false;
1074    cam_dimension_t largeYuv888Size = {0, 0};
1075    cam_dimension_t maxViewfinderSize = {0, 0};
1076    bool bJpegExceeds4K = false;
1077    bool bUseCommonFeatureMask = false;
1078    uint32_t commonFeatureMask = 0;
1079    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1080    camera3_stream_t *inputStream = NULL;
1081    bool isJpeg = false;
1082    cam_dimension_t jpegSize = {0, 0};
1083
1084    /*EIS configuration*/
1085    bool eisSupported = false;
1086    bool oisSupported = false;
1087    int32_t margin_index = -1;
1088    uint8_t eis_prop_set;
1089    uint32_t maxEisWidth = 0;
1090    uint32_t maxEisHeight = 0;
1091    int32_t hal_version = CAM_HAL_V3;
1092
1093    memset(&mInputStreamSize, 0, sizeof(mInputStreamSize));
1094
1095    size_t count = IS_TYPE_MAX;
1096    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1097    for (size_t i = 0; i < count; i++) {
1098        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1099            eisSupported = true;
1100            margin_index = (int32_t)i;
1101            break;
1102        }
1103    }
1104
1105    count = CAM_OPT_STAB_MAX;
1106    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1107    for (size_t i = 0; i < count; i++) {
1108        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1109            oisSupported = true;
1110            break;
1111        }
1112    }
1113
1114    if (eisSupported) {
1115        maxEisWidth = MAX_EIS_WIDTH;
1116        maxEisHeight = MAX_EIS_HEIGHT;
1117    }
1118
1119    /* EIS setprop control */
1120    char eis_prop[PROPERTY_VALUE_MAX];
1121    memset(eis_prop, 0, sizeof(eis_prop));
1122    property_get("persist.camera.eis.enable", eis_prop, "0");
1123    eis_prop_set = (uint8_t)atoi(eis_prop);
1124
1125    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1126            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1127
1128    /* stream configurations */
1129    for (size_t i = 0; i < streamList->num_streams; i++) {
1130        camera3_stream_t *newStream = streamList->streams[i];
1131        ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1132                "height = %d, rotation = %d, usage = 0x%x",
1133                __func__, i, newStream->stream_type, newStream->format,
1134                newStream->width, newStream->height, newStream->rotation,
1135                newStream->usage);
1136        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1137                newStream->stream_type == CAMERA3_STREAM_INPUT){
1138            isZsl = true;
1139        }
1140        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1141            inputStream = newStream;
1142        }
1143
1144        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1145            isJpeg = true;
1146            jpegSize.width = newStream->width;
1147            jpegSize.height = newStream->height;
1148            if (newStream->width > VIDEO_4K_WIDTH ||
1149                    newStream->height > VIDEO_4K_HEIGHT)
1150                bJpegExceeds4K = true;
1151        }
1152
1153        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1154                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1155            m_bIsVideo = true;
1156            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1157                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1158                videoWidth = newStream->width;
1159                videoHeight = newStream->height;
1160                m_bIs4KVideo = true;
1161            }
1162            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1163                                  (newStream->height <= maxEisHeight);
1164        }
1165        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1166                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1167            switch (newStream->format) {
1168            case HAL_PIXEL_FORMAT_BLOB:
1169                stallStreamCnt++;
1170                if (isOnEncoder(maxViewfinderSize, newStream->width,
1171                        newStream->height)) {
1172                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1173                    numStreamsOnEncoder++;
1174                }
1175                break;
1176            case HAL_PIXEL_FORMAT_RAW10:
1177            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1178            case HAL_PIXEL_FORMAT_RAW16:
1179                rawStreamCnt++;
1180                break;
1181            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1182                processedStreamCnt++;
1183                if (isOnEncoder(maxViewfinderSize, newStream->width,
1184                        newStream->height)) {
1185                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1186                            newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) {
1187                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1188                    } else {
1189                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1190                    }
1191                    numStreamsOnEncoder++;
1192                }
1193                break;
1194            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1195                processedStreamCnt++;
1196                if (isOnEncoder(maxViewfinderSize, newStream->width,
1197                        newStream->height)) {
1198                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1199                    numStreamsOnEncoder++;
1200                    numYuv888OnEncoder++;
1201                    largeYuv888Size.width = newStream->width;
1202                    largeYuv888Size.height = newStream->height;
1203                }
1204                break;
1205            default:
1206                processedStreamCnt++;
1207                if (isOnEncoder(maxViewfinderSize, newStream->width,
1208                        newStream->height)) {
1209                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1210                    numStreamsOnEncoder++;
1211                }
1212                break;
1213            }
1214
1215        }
1216    }
1217
1218    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1219        !m_bIsVideo) {
1220        m_bEisEnable = false;
1221    }
1222
1223    /* Check if num_streams is sane */
1224    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1225            rawStreamCnt > MAX_RAW_STREAMS ||
1226            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1227        ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1228                __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1229        pthread_mutex_unlock(&mMutex);
1230        return -EINVAL;
1231    }
1232    /* Check whether we have zsl stream or 4k video case */
1233    if (isZsl && m_bIsVideo) {
1234        ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1235        pthread_mutex_unlock(&mMutex);
1236        return -EINVAL;
1237    }
1238    /* Check if stream sizes are sane */
1239    if (numStreamsOnEncoder > 2) {
1240        ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1241                __func__);
1242        pthread_mutex_unlock(&mMutex);
1243        return -EINVAL;
1244    } else if (1 < numStreamsOnEncoder){
1245        bUseCommonFeatureMask = true;
1246        CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1247                __func__);
1248    }
1249
1250    /* Check if BLOB size is greater than 4k in 4k recording case */
1251    if (m_bIs4KVideo && bJpegExceeds4K) {
1252        ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1253                __func__);
1254        pthread_mutex_unlock(&mMutex);
1255        return -EINVAL;
1256    }
1257
1258    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1259    // the YUV stream's size is greater or equal to the JPEG size, set common
1260    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1261    if (numYuv888OnEncoder && isJpeg &&
1262            largeYuv888Size.width >= jpegSize.width &&
1263            largeYuv888Size.height >= jpegSize.height) {
1264        bYuv888OverrideJpeg = true;
1265        commonFeatureMask = CAM_QCOM_FEATURE_NONE;
1266    }
1267
1268    rc = validateStreamDimensions(streamList);
1269    if (rc == NO_ERROR) {
1270        rc = validateStreamRotations(streamList);
1271    }
1272    if (rc != NO_ERROR) {
1273        ALOGE("%s: Invalid stream configuration requested!", __func__);
1274        pthread_mutex_unlock(&mMutex);
1275        return rc;
1276    }
1277
1278    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1279    camera3_stream_t *jpegStream = NULL;
1280    for (size_t i = 0; i < streamList->num_streams; i++) {
1281        camera3_stream_t *newStream = streamList->streams[i];
1282        CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1283                "stream size : %d x %d, stream rotation = %d",
1284                __func__, newStream->stream_type, newStream->format,
1285                newStream->width, newStream->height, newStream->rotation);
1286        //if the stream is in the mStreamList validate it
1287        bool stream_exists = false;
1288        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1289                it != mStreamInfo.end(); it++) {
1290            if ((*it)->stream == newStream) {
1291                QCamera3ProcessingChannel *channel =
1292                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1293                stream_exists = true;
1294                if (channel)
1295                    delete channel;
1296                (*it)->status = VALID;
1297                (*it)->stream->priv = NULL;
1298                (*it)->channel = NULL;
1299            }
1300        }
1301        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1302            //new stream
1303            stream_info_t* stream_info;
1304            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1305            if (!stream_info) {
1306               ALOGE("%s: Could not allocate stream info", __func__);
1307               rc = -ENOMEM;
1308               pthread_mutex_unlock(&mMutex);
1309               return rc;
1310            }
1311            stream_info->stream = newStream;
1312            stream_info->status = VALID;
1313            stream_info->channel = NULL;
1314            mStreamInfo.push_back(stream_info);
1315        }
1316        /* Covers Opaque ZSL and API1 F/W ZSL */
1317        if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL
1318                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1319            if (zslStream != NULL) {
1320                ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1321                pthread_mutex_unlock(&mMutex);
1322                return BAD_VALUE;
1323            }
1324            zslStream = newStream;
1325        }
1326        /* Covers YUV reprocess */
1327        if (inputStream != NULL) {
1328            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1329                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1330                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1331                    && inputStream->width == newStream->width
1332                    && inputStream->height == newStream->height) {
1333                if (zslStream != NULL) {
1334                    /* This scenario indicates multiple YUV streams with same size
1335                     * as input stream have been requested, since zsl stream handle
1336                     * is solely use for the purpose of overriding the size of streams
1337                     * which share h/w streams we will just make a guess here as to
1338                     * which of the stream is a ZSL stream, this will be refactored
1339                     * once we make generic logic for streams sharing encoder output
1340                     */
1341                    CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1342                }
1343                zslStream = newStream;
1344            }
1345        }
1346        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1347            jpegStream = newStream;
1348        }
1349    }
1350
1351    /* If a zsl stream is set, we know that we have configured at least one input or
1352       bidirectional stream */
1353    if (NULL != zslStream) {
1354        mInputStreamSize.width = (int32_t)zslStream->width;
1355        mInputStreamSize.height = (int32_t)zslStream->height;
1356        CDBG("%s: Input stream configured! %d x %d", __func__, mInputStreamSize.width,
1357                mInputStreamSize.height);
1358    }
1359
1360    cleanAndSortStreamInfo();
1361    if (mMetadataChannel) {
1362        delete mMetadataChannel;
1363        mMetadataChannel = NULL;
1364    }
1365    if (mSupportChannel) {
1366        delete mSupportChannel;
1367        mSupportChannel = NULL;
1368    }
1369
1370    if (mAnalysisChannel) {
1371        delete mAnalysisChannel;
1372        mAnalysisChannel = NULL;
1373    }
1374
1375    if (mDummyBatchChannel) {
1376        delete mDummyBatchChannel;
1377        mDummyBatchChannel = NULL;
1378    }
1379
1380    //Create metadata channel and initialize it
1381    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1382                    mCameraHandle->ops, captureResultCb,
1383                    &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1384    if (mMetadataChannel == NULL) {
1385        ALOGE("%s: failed to allocate metadata channel", __func__);
1386        rc = -ENOMEM;
1387        pthread_mutex_unlock(&mMutex);
1388        return rc;
1389    }
1390    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1391    if (rc < 0) {
1392        ALOGE("%s: metadata channel initialization failed", __func__);
1393        delete mMetadataChannel;
1394        mMetadataChannel = NULL;
1395        pthread_mutex_unlock(&mMutex);
1396        return rc;
1397    }
1398
1399    // Create analysis stream all the time, even when h/w support is not available
1400    // TODO: This is WAR. Need to enable analysis stream for HFR as well
1401    if (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) {
1402        mAnalysisChannel = new QCamera3SupportChannel(
1403                mCameraHandle->camera_handle,
1404                mCameraHandle->ops,
1405                &gCamCapability[mCameraId]->padding_info,
1406                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1407                CAM_STREAM_TYPE_ANALYSIS,
1408                &gCamCapability[mCameraId]->analysis_recommended_res,
1409                gCamCapability[mCameraId]->analysis_recommended_format,
1410                this,
1411                0); // force buffer count to 0
1412        if (!mAnalysisChannel) {
1413            ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1414            pthread_mutex_unlock(&mMutex);
1415            return -ENOMEM;
1416        }
1417    }
1418
1419    bool isRawStreamRequested = false;
1420    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1421    /* Allocate channel objects for the requested streams */
1422    for (size_t i = 0; i < streamList->num_streams; i++) {
1423        camera3_stream_t *newStream = streamList->streams[i];
1424        uint32_t stream_usage = newStream->usage;
1425        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1426        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1427        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1428                || newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) &&
1429            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1430            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1431            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1432        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1433                CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1434        } else {
1435            //for non zsl streams find out the format
1436            switch (newStream->format) {
1437            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1438              {
1439                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1440                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1441                 } else {
1442                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1443                 }
1444                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1445                         = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1446
1447                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1448                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1449                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1450                             newStream->height;
1451                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1452                             newStream->width;
1453                 }
1454              }
1455              break;
1456           case HAL_PIXEL_FORMAT_YCbCr_420_888:
1457              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1458              if (isOnEncoder(maxViewfinderSize, newStream->width,
1459                      newStream->height)) {
1460                  if (bUseCommonFeatureMask)
1461                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1462                              commonFeatureMask;
1463                  else
1464                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1465                              CAM_QCOM_FEATURE_NONE;
1466              } else {
1467                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1468                          CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1469              }
1470              if (CAM_QCOM_FEATURE_NONE ==
1471                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]) {
1472                  mPprocBypass = true;
1473              }
1474              break;
1475           case HAL_PIXEL_FORMAT_BLOB:
1476              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1477              if (m_bIs4KVideo && !isZsl) {
1478                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1479                          = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1480              } else {
1481                  if (bUseCommonFeatureMask &&
1482                          isOnEncoder(maxViewfinderSize, newStream->width,
1483                                  newStream->height)) {
1484                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1485                  } else {
1486                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1487                  }
1488              }
1489              if (isZsl) {
1490                  if (zslStream) {
1491                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1492                              (int32_t)zslStream->width;
1493                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1494                              (int32_t)zslStream->height;
1495                  } else {
1496                      ALOGE("%s: Error, No ZSL stream identified",__func__);
1497                      pthread_mutex_unlock(&mMutex);
1498                      return -EINVAL;
1499                  }
1500              } else if (m_bIs4KVideo) {
1501                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1502                          (int32_t)videoWidth;
1503                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1504                          (int32_t)videoHeight;
1505              } else if (bYuv888OverrideJpeg) {
1506                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1507                          (int32_t)largeYuv888Size.width;
1508                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1509                          (int32_t)largeYuv888Size.height;
1510              }
1511              break;
1512           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1513           case HAL_PIXEL_FORMAT_RAW16:
1514           case HAL_PIXEL_FORMAT_RAW10:
1515              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1516              isRawStreamRequested = true;
1517              break;
1518           default:
1519              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1520              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1521              break;
1522            }
1523
1524        }
1525        if (newStream->priv == NULL) {
1526            //New stream, construct channel
1527            switch (newStream->stream_type) {
1528            case CAMERA3_STREAM_INPUT:
1529                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1530                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1531                break;
1532            case CAMERA3_STREAM_BIDIRECTIONAL:
1533                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1534                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1535                break;
1536            case CAMERA3_STREAM_OUTPUT:
1537                /* For video encoding stream, set read/write rarely
1538                 * flag so that they may be set to un-cached */
1539                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1540                    newStream->usage |=
1541                         (GRALLOC_USAGE_SW_READ_RARELY |
1542                         GRALLOC_USAGE_SW_WRITE_RARELY |
1543                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1544                else if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL)
1545                    CDBG("%s: ZSL usage flag skipping", __func__);
1546                else
1547                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1548                break;
1549            default:
1550                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1551                break;
1552            }
1553
1554            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1555                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1556                QCamera3ProcessingChannel *channel = NULL;
1557                switch (newStream->format) {
1558                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1559                    /* use higher number of buffers for HFR mode */
1560                    if((newStream->format ==
1561                            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) &&
1562                            (newStream->usage &
1563                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1564                            (streamList->operation_mode ==
1565                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1566                    ) {
1567                        numBuffers = MAX_INFLIGHT_REQUESTS * MAX_HFR_BATCH_SIZE;
1568                        ALOGI("%s: num video buffers in HFR mode: %d",
1569                                __func__, numBuffers);
1570                    }
1571                    /* Copy stream contents in HFR preview only case to create
1572                     * dummy batch channel */
1573                    if (!m_bIsVideo && (streamList->operation_mode ==
1574                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1575                        mDummyBatchStream = *newStream;
1576                    }
1577                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1578                            mCameraHandle->ops, captureResultCb,
1579                            &gCamCapability[mCameraId]->padding_info,
1580                            this,
1581                            newStream,
1582                            (cam_stream_type_t)
1583                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1584                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1585                            mMetadataChannel,
1586                            numBuffers);
1587                    if (channel == NULL) {
1588                        ALOGE("%s: allocation of channel failed", __func__);
1589                        pthread_mutex_unlock(&mMutex);
1590                        return -ENOMEM;
1591                    }
1592                    newStream->max_buffers = channel->getNumBuffers();
1593                    newStream->priv = channel;
1594                    break;
1595                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1596                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1597                            mCameraHandle->ops, captureResultCb,
1598                            &gCamCapability[mCameraId]->padding_info,
1599                            this,
1600                            newStream,
1601                            (cam_stream_type_t)
1602                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1603                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1604                            mMetadataChannel);
1605                    if (channel == NULL) {
1606                        ALOGE("%s: allocation of YUV channel failed", __func__);
1607                        pthread_mutex_unlock(&mMutex);
1608                        return -ENOMEM;
1609                    }
1610                    newStream->max_buffers = channel->getNumBuffers();
1611                    newStream->priv = channel;
1612                    break;
1613                }
1614                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1615                case HAL_PIXEL_FORMAT_RAW16:
1616                case HAL_PIXEL_FORMAT_RAW10:
1617                    mRawChannel = new QCamera3RawChannel(
1618                            mCameraHandle->camera_handle,
1619                            mCameraHandle->ops, captureResultCb,
1620                            &gCamCapability[mCameraId]->padding_info,
1621                            this, newStream, CAM_QCOM_FEATURE_NONE,
1622                            mMetadataChannel,
1623                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1624                    if (mRawChannel == NULL) {
1625                        ALOGE("%s: allocation of raw channel failed", __func__);
1626                        pthread_mutex_unlock(&mMutex);
1627                        return -ENOMEM;
1628                    }
1629                    newStream->max_buffers = mRawChannel->getNumBuffers();
1630                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1631                    break;
1632                case HAL_PIXEL_FORMAT_BLOB:
1633                    // Max live snapshot inflight buffer is 1. This is to mitigate
1634                    // frame drop issues for video snapshot. The more buffers being
1635                    // allocated, the more frame drops there are.
1636                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
1637                            mCameraHandle->ops, captureResultCb,
1638                            &gCamCapability[mCameraId]->padding_info, this, newStream,
1639                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1640                            m_bIs4KVideo, mMetadataChannel,
1641                            (m_bIsVideo ? 1 : MAX_INFLIGHT_REQUESTS));
1642                    if (mPictureChannel == NULL) {
1643                        ALOGE("%s: allocation of channel failed", __func__);
1644                        pthread_mutex_unlock(&mMutex);
1645                        return -ENOMEM;
1646                    }
1647                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
1648                    newStream->max_buffers = mPictureChannel->getNumBuffers();
1649                    mPictureChannel->overrideYuvSize(
1650                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
1651                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
1652                    break;
1653
1654                default:
1655                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1656                    break;
1657                }
1658            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1659                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1660            } else {
1661                ALOGE("%s: Error, Unknown stream type", __func__);
1662                return -EINVAL;
1663            }
1664
1665            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1666                    it != mStreamInfo.end(); it++) {
1667                if ((*it)->stream == newStream) {
1668                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
1669                    break;
1670                }
1671            }
1672        } else {
1673            // Channel already exists for this stream
1674            // Do nothing for now
1675        }
1676
1677    /* Do not add entries for input stream in metastream info
1678         * since there is no real stream associated with it
1679         */
1680        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1681            mStreamConfigInfo.num_streams++;
1682    }
1683
1684    //RAW DUMP channel
1685    if (mEnableRawDump && isRawStreamRequested == false){
1686        cam_dimension_t rawDumpSize;
1687        rawDumpSize = getMaxRawSize(mCameraId);
1688        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1689                                  mCameraHandle->ops,
1690                                  rawDumpSize,
1691                                  &gCamCapability[mCameraId]->padding_info,
1692                                  this, CAM_QCOM_FEATURE_NONE);
1693        if (!mRawDumpChannel) {
1694            ALOGE("%s: Raw Dump channel cannot be created", __func__);
1695            pthread_mutex_unlock(&mMutex);
1696            return -ENOMEM;
1697        }
1698    }
1699
1700
1701    if (mAnalysisChannel) {
1702        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1703                gCamCapability[mCameraId]->analysis_recommended_res;
1704        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1705                CAM_STREAM_TYPE_ANALYSIS;
1706        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1707                CAM_QCOM_FEATURE_FACE_DETECTION;
1708        mStreamConfigInfo.num_streams++;
1709    }
1710
1711    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
1712        mSupportChannel = new QCamera3SupportChannel(
1713                mCameraHandle->camera_handle,
1714                mCameraHandle->ops,
1715                &gCamCapability[mCameraId]->padding_info,
1716                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1717                CAM_STREAM_TYPE_CALLBACK,
1718                &QCamera3SupportChannel::kDim,
1719                CAM_FORMAT_YUV_420_NV21,
1720                this);
1721        if (!mSupportChannel) {
1722            ALOGE("%s: dummy channel cannot be created", __func__);
1723            pthread_mutex_unlock(&mMutex);
1724            return -ENOMEM;
1725        }
1726    }
1727
1728    if (mSupportChannel) {
1729        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1730                QCamera3SupportChannel::kDim;
1731        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1732                CAM_STREAM_TYPE_CALLBACK;
1733        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1734                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1735        mStreamConfigInfo.num_streams++;
1736    }
1737
1738    if (mRawDumpChannel) {
1739        cam_dimension_t rawSize;
1740        rawSize = getMaxRawSize(mCameraId);
1741        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1742                rawSize;
1743        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1744                CAM_STREAM_TYPE_RAW;
1745        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1746                CAM_QCOM_FEATURE_NONE;
1747        mStreamConfigInfo.num_streams++;
1748    }
1749    /* In HFR mode, if video stream is not added, create a dummy channel so that
1750     * ISP can create a batch mode even for preview only case. This channel is
1751     * never 'start'ed (no stream-on), it is only 'initialized'  */
1752    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1753            !m_bIsVideo) {
1754        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1755                mCameraHandle->ops, captureResultCb,
1756                &gCamCapability[mCameraId]->padding_info,
1757                this,
1758                &mDummyBatchStream,
1759                CAM_STREAM_TYPE_VIDEO,
1760                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1761                mMetadataChannel);
1762        if (NULL == mDummyBatchChannel) {
1763            ALOGE("%s: creation of mDummyBatchChannel failed."
1764                    "Preview will use non-hfr sensor mode ", __func__);
1765        }
1766    }
1767    if (mDummyBatchChannel) {
1768        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1769                mDummyBatchStream.width;
1770        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1771                mDummyBatchStream.height;
1772        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1773                CAM_STREAM_TYPE_VIDEO;
1774        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1775                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1776        mStreamConfigInfo.num_streams++;
1777    }
1778
1779    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
1780    mStreamConfigInfo.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS;
1781
1782    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
1783    for (pendingRequestIterator i = mPendingRequestsList.begin();
1784            i != mPendingRequestsList.end();) {
1785        i = erasePendingRequest(i);
1786    }
1787    mPendingFrameDropList.clear();
1788    // Initialize/Reset the pending buffers list
1789    mPendingBuffersMap.num_buffers = 0;
1790    mPendingBuffersMap.mPendingBufferList.clear();
1791    mPendingReprocessResultList.clear();
1792
1793    mFirstRequest = true;
1794    //Get min frame duration for this streams configuration
1795    deriveMinFrameDuration();
1796
1797    /* Turn on video hint only if video stream is configured */
1798    updatePowerHint(bWasVideo, m_bIsVideo);
1799
1800    pthread_mutex_unlock(&mMutex);
1801    return rc;
1802}
1803
1804/*===========================================================================
1805 * FUNCTION   : validateCaptureRequest
1806 *
1807 * DESCRIPTION: validate a capture request from camera service
1808 *
1809 * PARAMETERS :
1810 *   @request : request from framework to process
1811 *
1812 * RETURN     :
1813 *
1814 *==========================================================================*/
1815int QCamera3HardwareInterface::validateCaptureRequest(
1816                    camera3_capture_request_t *request)
1817{
1818    ssize_t idx = 0;
1819    const camera3_stream_buffer_t *b;
1820    CameraMetadata meta;
1821
1822    /* Sanity check the request */
1823    if (request == NULL) {
1824        ALOGE("%s: NULL capture request", __func__);
1825        return BAD_VALUE;
1826    }
1827
1828    if (request->settings == NULL && mFirstRequest) {
1829        /*settings cannot be null for the first request*/
1830        return BAD_VALUE;
1831    }
1832
1833    uint32_t frameNumber = request->frame_number;
1834    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1835        ALOGE("%s: Request %d: No output buffers provided!",
1836                __FUNCTION__, frameNumber);
1837        return BAD_VALUE;
1838    }
1839    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
1840        ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
1841                __func__, request->num_output_buffers, MAX_NUM_STREAMS);
1842        return BAD_VALUE;
1843    }
1844    if (request->input_buffer != NULL) {
1845        b = request->input_buffer;
1846        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1847            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1848                    __func__, frameNumber, (long)idx);
1849            return BAD_VALUE;
1850        }
1851        if (b->release_fence != -1) {
1852            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1853                    __func__, frameNumber, (long)idx);
1854            return BAD_VALUE;
1855        }
1856        if (b->buffer == NULL) {
1857            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1858                    __func__, frameNumber, (long)idx);
1859            return BAD_VALUE;
1860        }
1861    }
1862
1863    // Validate all buffers
1864    b = request->output_buffers;
1865    do {
1866        QCamera3ProcessingChannel *channel =
1867                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
1868        if (channel == NULL) {
1869            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
1870                    __func__, frameNumber, (long)idx);
1871            return BAD_VALUE;
1872        }
1873        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1874            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1875                    __func__, frameNumber, (long)idx);
1876            return BAD_VALUE;
1877        }
1878        if (b->release_fence != -1) {
1879            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1880                    __func__, frameNumber, (long)idx);
1881            return BAD_VALUE;
1882        }
1883        if (b->buffer == NULL) {
1884            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1885                    __func__, frameNumber, (long)idx);
1886            return BAD_VALUE;
1887        }
1888        if (*(b->buffer) == NULL) {
1889            ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
1890                    __func__, frameNumber, (long)idx);
1891            return BAD_VALUE;
1892        }
1893        idx++;
1894        b = request->output_buffers + idx;
1895    } while (idx < (ssize_t)request->num_output_buffers);
1896
1897    return NO_ERROR;
1898}
1899
1900/*===========================================================================
1901 * FUNCTION   : deriveMinFrameDuration
1902 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
1904 *              on currently configured streams.
1905 *
1906 * PARAMETERS : NONE
1907 *
1908 * RETURN     : NONE
1909 *
1910 *==========================================================================*/
1911void QCamera3HardwareInterface::deriveMinFrameDuration()
1912{
1913    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
1914
1915    maxJpegDim = 0;
1916    maxProcessedDim = 0;
1917    maxRawDim = 0;
1918
1919    // Figure out maximum jpeg, processed, and raw dimensions
1920    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1921        it != mStreamInfo.end(); it++) {
1922
1923        // Input stream doesn't have valid stream_type
1924        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
1925            continue;
1926
1927        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
1928        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1929            if (dimension > maxJpegDim)
1930                maxJpegDim = dimension;
1931        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1932                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
1933                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
1934            if (dimension > maxRawDim)
1935                maxRawDim = dimension;
1936        } else {
1937            if (dimension > maxProcessedDim)
1938                maxProcessedDim = dimension;
1939        }
1940    }
1941
1942    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
1943            MAX_SIZES_CNT);
1944
1945    //Assume all jpeg dimensions are in processed dimensions.
1946    if (maxJpegDim > maxProcessedDim)
1947        maxProcessedDim = maxJpegDim;
1948    //Find the smallest raw dimension that is greater or equal to jpeg dimension
1949    if (maxProcessedDim > maxRawDim) {
1950        maxRawDim = INT32_MAX;
1951
1952        for (size_t i = 0; i < count; i++) {
1953            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
1954                    gCamCapability[mCameraId]->raw_dim[i].height;
1955            if (dimension >= maxProcessedDim && dimension < maxRawDim)
1956                maxRawDim = dimension;
1957        }
1958    }
1959
1960    //Find minimum durations for processed, jpeg, and raw
1961    for (size_t i = 0; i < count; i++) {
1962        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
1963                gCamCapability[mCameraId]->raw_dim[i].height) {
1964            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
1965            break;
1966        }
1967    }
1968    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1969    for (size_t i = 0; i < count; i++) {
1970        if (maxProcessedDim ==
1971                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
1972                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
1973            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1974            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1975            break;
1976        }
1977    }
1978}
1979
1980/*===========================================================================
1981 * FUNCTION   : getMinFrameDuration
1982 *
 * DESCRIPTION: get minimum frame duration based on the currently derived minimum frame durations
1984 *              and current request configuration.
1985 *
 * PARAMETERS : @request: request sent by the framework
1987 *
 * RETURN     : min frame duration for a particular request
1989 *
1990 *==========================================================================*/
1991int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
1992{
1993    bool hasJpegStream = false;
1994    bool hasRawStream = false;
1995    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
1996        const camera3_stream_t *stream = request->output_buffers[i].stream;
1997        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
1998            hasJpegStream = true;
1999        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2000                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2001                stream->format == HAL_PIXEL_FORMAT_RAW16)
2002            hasRawStream = true;
2003    }
2004
2005    if (!hasJpegStream)
2006        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2007    else
2008        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2009}
2010
2011/*===========================================================================
2012 * FUNCTION   : handlePendingReprocResults
2013 *
2014 * DESCRIPTION: check and notify on any pending reprocess results
2015 *
2016 * PARAMETERS :
2017 *   @frame_number   : Pending request frame number
2018 *
2019 * RETURN     : int32_t type of status
2020 *              NO_ERROR  -- success
2021 *              none-zero failure code
2022 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a deferred reprocess result recorded for this frame number.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notification message that was stored with the
            // pending result.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be attached to the final capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    // Assemble the capture result: one output buffer (the
                    // reprocessed buffer held in the pending result) plus
                    // the original request's input buffer and settings.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    // All partial results are reported as complete here.
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Erase-then-break: iterator k is not used after erase,
                    // so removing the element mid-loop is safe.
                    erasePendingRequest(k);
                    mPendingRequest--;
                    break;
                }
            }
            // Same pattern: j is consumed and the outer loop exits.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2062
2063/*===========================================================================
2064 * FUNCTION   : handleBatchMetadata
2065 *
2066 * DESCRIPTION: Handles metadata buffer callback in batch mode
2067 *
2068 * PARAMETERS : @metadata_buf: metadata buffer
2069 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2070 *                 the meta buf in this method
2071 *
2072 * RETURN     :
2073 *
2074 *==========================================================================*/
2075void QCamera3HardwareInterface::handleBatchMetadata(
2076        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2077{
2078    ATRACE_CALL();
2079
2080    if (NULL == metadata_buf) {
2081        ALOGE("%s: metadata_buf is NULL", __func__);
2082        return;
2083    }
2084    /* In batch mode, the metdata will contain the frame number and timestamp of
2085     * the last frame in the batch. Eg: a batch containing buffers from request
2086     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2087     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2088     * multiple process_capture_results */
2089    metadata_buffer_t *metadata =
2090            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2091    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2092    uint32_t last_frame_number, last_urgent_frame_number;
2093    uint32_t frame_number, urgent_frame_number = 0;
2094    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2095    bool invalid_metadata = false;
2096    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2097    size_t loopCount = 1;
2098
2099    int32_t *p_frame_number_valid =
2100            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2101    uint32_t *p_frame_number =
2102            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2103    int64_t *p_capture_time =
2104            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2105    int32_t *p_urgent_frame_number_valid =
2106            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2107    uint32_t *p_urgent_frame_number =
2108            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2109
2110    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2111            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2112            (NULL == p_urgent_frame_number)) {
2113        ALOGE("%s: Invalid metadata", __func__);
2114        invalid_metadata = true;
2115    } else {
2116        frame_number_valid = *p_frame_number_valid;
2117        last_frame_number = *p_frame_number;
2118        last_frame_capture_time = *p_capture_time;
2119        urgent_frame_number_valid = *p_urgent_frame_number_valid;
2120        last_urgent_frame_number = *p_urgent_frame_number;
2121    }
2122
2123    // If reported capture_time is 0, skip handling this metadata
2124    if (!last_frame_capture_time) {
2125        goto done_batch_metadata;
2126    }
2127    /* In batchmode, when no video buffers are requested, set_parms are sent
2128     * for every capture_request. The difference between consecutive urgent
2129     * frame numbers and frame numbers should be used to interpolate the
2130     * corresponding frame numbers and time stamps */
2131    if (urgent_frame_number_valid) {
2132        /* Frame numbers start with 0, handle it in the else condition */
2133        if (last_urgent_frame_number &&
2134                (last_urgent_frame_number >= mPrevUrgentFrameNumber)) {
2135            urgentFrameNumDiff = last_urgent_frame_number - mPrevUrgentFrameNumber;
2136        } else {
2137            urgentFrameNumDiff = 1;
2138        }
2139        mPrevUrgentFrameNumber = last_urgent_frame_number;
2140    }
2141    if (frame_number_valid) {
2142        /* Frame numbers start with 0, handle it in the else condition */
2143        if(last_frame_number && (last_frame_number >= mPrevFrameNumber)) {
2144            frameNumDiff = last_frame_number - mPrevFrameNumber;
2145        } else {
2146            frameNumDiff = 1;
2147        }
2148        mPrevFrameNumber = last_frame_number;
2149    }
2150    if (urgent_frame_number_valid || frame_number_valid) {
2151        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2152    }
2153
2154    CDBG("%s: urgent_frm: valid: %d frm_num: %d previous frm_num: %d",
2155            __func__, urgent_frame_number_valid, last_urgent_frame_number,
2156            mPrevUrgentFrameNumber);
2157    CDBG("%s:        frm: valid: %d frm_num: %d previous frm_num:: %d",
2158            __func__, frame_number_valid, last_frame_number, mPrevFrameNumber);
2159
2160    //TODO: Need to ensure, metadata is not posted with the same frame numbers
2161    //when urgentFrameNumDiff != frameNumDiff
2162    for (size_t i = 0; i < loopCount; i++) {
2163        /* handleMetadataWithLock is called even for invalid_metadata for
2164         * pipeline depth calculation */
2165        if (!invalid_metadata) {
2166            /* Infer frame number. Batch metadata contains frame number of the
2167             * last frame */
2168            if (urgent_frame_number_valid) {
2169                if (i < urgentFrameNumDiff) {
2170                    urgent_frame_number =
2171                            last_urgent_frame_number + 1 - urgentFrameNumDiff + i;
2172                    CDBG("%s: inferred urgent frame_number: %d",
2173                            __func__, urgent_frame_number);
2174                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2175                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2176                } else {
2177                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2178                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2179                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2180                }
2181            }
2182
2183            /* Infer frame number. Batch metadata contains frame number of the
2184             * last frame */
2185            if (frame_number_valid) {
2186                if (i < frameNumDiff) {
2187                    frame_number = last_frame_number + 1 - frameNumDiff + i;
2188                    CDBG("%s: inferred frame_number: %d", __func__, frame_number);
2189                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2190                            CAM_INTF_META_FRAME_NUMBER, frame_number);
2191                } else {
2192                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2193                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2194                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2195                }
2196            }
2197
2198            //Infer timestamp
2199            first_frame_capture_time = last_frame_capture_time -
2200                    (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
2201            capture_time =
2202                    first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
2203            ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2204                    CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2205            CDBG("%s: batch capture_time: %lld, capture_time: %lld",
2206                    __func__, last_frame_capture_time, capture_time);
2207        }
2208        pthread_mutex_lock(&mMutex);
2209        handleMetadataWithLock(metadata_buf,
2210                false /* free_and_bufdone_meta_buf */);
2211        pthread_mutex_unlock(&mMutex);
2212    }
2213
2214done_batch_metadata:
2215    /* BufDone metadata buffer */
2216    if (free_and_bufdone_meta_buf) {
2217        mMetadataChannel->bufDone(metadata_buf);
2218        free(metadata_buf);
2219    }
2220}
2221
2222/*===========================================================================
2223 * FUNCTION   : handleMetadataWithLock
2224 *
2225 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2226 *
2227 * PARAMETERS : @metadata_buf: metadata buffer
2228 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2229 *                 the meta buf in this method
2230 *
2231 * RETURN     :
2232 *
2233 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    // Raw HAL metadata payload lives in the first buffer of the superbuf.
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;

    // Pull out the entries this handler needs. Any of these pointers may be
    // NULL if the corresponding tag is absent from the batch; they are
    // validated together further below.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // NOTE(review): this debug log dereferences p_frame_number_valid and
    // p_frame_number BEFORE the NULL validation below. If frame-drop info is
    // present while either pointer is NULL, this would crash when CDBG
    // logging is compiled in — confirm and consider moving after the check.
    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
                __func__, *p_frame_number_valid, *p_frame_number);
    }

    // If any mandatory entry is missing, return the metadata buffer (when we
    // own it) and bail out; pipeline depth is still bumped at done_metadata.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        ALOGE("%s: Invalid metadata", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    } else {
        frame_number_valid = *p_frame_number_valid;
        frame_number = *p_frame_number;
        capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        urgent_frame_number = *p_urgent_frame_number;
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            CDBG("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // An older pending request that never got its urgent (3A) partial
            // result means the HAL missed metadata for that frame.
            if (i->frame_number < urgent_frame_number &&
                i->partial_result_cnt == 0) {
                ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
                    __func__, i->frame_number);
            }

            // Deliver the 3A-only partial result exactly once per request.
            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: urgent frame_number = %u, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // Metadata without a valid frame number is only a start-of-frame marker;
    // nothing to match against pending requests.
    if (!frame_number_valid) {
        CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
            frame_number, capture_time);

    // Walk pending requests in order and flush out every entry whose frame
    // number is <= the one carried by this metadata. The iterator is advanced
    // by erasePendingRequest() at the bottom of the loop body.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Flush out all entries with less or equal frame numbers.
        mPendingRequest--;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
               if (j->stream->format != HAL_PIXEL_FORMAT_BLOB) {
                   QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                   uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                   for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
                       if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
                           // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                           CDBG("%s: Start of reporting error frame#=%u, streamID=%u",
                                   __func__, i->frame_number, streamID);
                           notify_msg.type = CAMERA3_MSG_ERROR;
                           notify_msg.message.error.frame_number = i->frame_number;
                           notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                           notify_msg.message.error.error_stream = j->stream;
                           mCallbackOps->notify(mCallbackOps, &notify_msg);
                           CDBG("%s: End of reporting error frame#=%u, streamID=%u",
                                  __func__, i->frame_number, streamID);
                           // Remember the drop so the matching buffer is
                           // later returned with CAMERA3_BUFFER_STATUS_ERROR.
                           PendingFrameDropInfo PendingFrameDrop;
                           PendingFrameDrop.frame_number=i->frame_number;
                           PendingFrameDrop.stream_ID = streamID;
                           // Add the Frame drop info to mPendingFrameDropList
                           mPendingFrameDropList.push_back(PendingFrameDrop);
                      }
                   }
               }
            }
        }

        //TODO: batch handling for dropped metadata

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        if (i->frame_number < frame_number) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Synthesize a shutter timestamp for the dropped frame by stepping
            // back ~33ms per missed frame from the current capture time.
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time -
                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->timestamp = (nsecs_t)notify_msg.message.shutter.timestamp;
            CDBG("%s: Support notification !!!! notify frame_number = %u, capture_time = %llu",
                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);

            // Minimal placeholder metadata: just timestamp and request id.
            CameraMetadata dummyMetadata;
            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                    &i->timestamp, 1);
            dummyMetadata.update(ANDROID_REQUEST_ID,
                    &(i->request_id), 1);
            result.result = dummyMetadata.release();
        } else {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            // Full translation of HAL metadata into a framework result.
            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            // Find channel requiring metadata, and queue the current metadata.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            pendingBufferIterator iter = i->buffers.begin();
            while (iter != i->buffers.end() && !iter->need_metadata)
                iter++;
            if (iter == i->buffers.end()) {
                CDBG("%s: couldn't find need_metadata for this metadata", __func__);
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            } else {
                // Ownership of metadata_buf passes to the reprocess channel.
                CDBG("%s: need_metadata is set for this metadata", __func__);
                QCamera3ProcessingChannel *channel =
                        (QCamera3ProcessingChannel *)iter->stream->priv;
                channel->queueReprocMetadata(metadata_buf);
            }
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count buffers already cached for this request (filled earlier by
        // handleBufferWithLock); only those can be returned with this result.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            // NOTE(review): operator new[] throws on failure rather than
            // returning NULL, so this check is ineffective (and the code
            // falls through regardless) — confirm intended behavior.
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer as errored if its frame was reported
                    // dropped earlier, and retire the drop record.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            CDBG("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove this buffer from the global pending-buffer map.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        CDBG("%s: Found buffer %p in pending buffer List "
                              "for frame %u, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s: meta frame_number = %u, capture_time = %lld",
                    __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // No buffers ready yet: metadata-only result.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s: meta frame_number = %u, capture_time = %lld",
                        __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list
        i = erasePendingRequest(i);

        // Flush any reprocess results that were cached while this (older)
        // request was still pending.
        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every metadata callback (valid or not) counts one stage of pipeline
    // depth for all still-pending requests.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    unblockRequestIfNecessary();

}
2538
2539/*===========================================================================
2540 * FUNCTION   : handleBufferWithLock
2541 *
2542 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
2543 *
2544 * PARAMETERS : @buffer: image buffer for the callback
2545 *              @frame_number: frame number of the image buffer
2546 *
2547 * RETURN     :
2548 *
2549 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Request already retired (metadata went out first): return the
        // buffer on its own in a buffer-only result.
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // partial_result 0: this result carries no metadata.
        result.partial_result = 0;
        // If this frame/stream was reported dropped, flag the buffer as
        // errored and retire the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Drop the buffer from the global pending-buffer map before
        // returning it to the framework.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                CDBG("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: build shutter + full result here, since no
            // sensor metadata callback will arrive for it.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Prefer the timestamp carried in the input settings; fall back
            // to the current monotonic time.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait for (and close) the input buffer's release fence before
            // declaring the reprocess complete.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
               }
            }

            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
                if (k->buffer == buffer->buffer) {
                    CDBG("%s: Found Frame buffer, take it out from list",
                            __func__);

                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                    break;
                }
            }
            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                __func__, mPendingBuffersMap.num_buffers);

            // Results must go out in frame-number order: only notify now if
            // no older request is still pending.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
                i = erasePendingRequest(i);
                mPendingRequest--;
            } else {
                // Cache reprocess result for later
                // (delivered by handlePendingReprocResults once older
                // requests have completed).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
            }
        } else {
            // Normal request still pending: cache a copy of the stream
            // buffer so it is returned together with the metadata later
            // (freed in handleMetadataWithLock).
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
2705
2706/*===========================================================================
2707 * FUNCTION   : unblockRequestIfNecessary
2708 *
2709 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
2710 *              that mMutex is held when this function is called.
2711 *
2712 * PARAMETERS :
2713 *
2714 * RETURN     :
2715 *
2716 *==========================================================================*/
2717void QCamera3HardwareInterface::unblockRequestIfNecessary()
2718{
2719   // Unblock process_capture_request
2720   pthread_cond_signal(&mRequestCond);
2721}
2722
2723
2724/*===========================================================================
2725 * FUNCTION   : processCaptureRequest
2726 *
2727 * DESCRIPTION: process a capture request from camera service
2728 *
2729 * PARAMETERS :
2730 *   @request : request from framework to process
2731 *
2732 * RETURN     :
2733 *
2734 *==========================================================================*/
2735int QCamera3HardwareInterface::processCaptureRequest(
2736                    camera3_capture_request_t *request)
2737{
2738    ATRACE_CALL();
2739    int rc = NO_ERROR;
2740    int32_t request_id;
2741    CameraMetadata meta;
2742    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
2743    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
2744    bool isVidBufRequested = false;
2745    camera3_stream_buffer_t *pInputBuffer;
2746
2747    pthread_mutex_lock(&mMutex);
2748
2749    rc = validateCaptureRequest(request);
2750    if (rc != NO_ERROR) {
2751        ALOGE("%s: incoming request is not valid", __func__);
2752        pthread_mutex_unlock(&mMutex);
2753        return rc;
2754    }
2755
2756    meta = request->settings;
2757
2758    // For first capture request, send capture intent, and
2759    // stream on all streams
2760    if (mFirstRequest) {
2761        // send an unconfigure to the backend so that the isp
2762        // resources are deallocated
2763        if (!mFirstConfiguration) {
2764            cam_stream_size_info_t stream_config_info;
2765            int32_t hal_version = CAM_HAL_V3;
2766            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
2767            stream_config_info.buffer_info.min_buffers =
2768                    MIN_INFLIGHT_REQUESTS;
2769            stream_config_info.buffer_info.max_buffers =
2770                    MAX_INFLIGHT_REQUESTS;
2771            clear_metadata_buffer(mParameters);
2772            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2773                    CAM_INTF_PARM_HAL_VERSION, hal_version);
2774            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2775                    CAM_INTF_META_STREAM_INFO, stream_config_info);
2776            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2777                    mParameters);
2778            if (rc < 0) {
2779                ALOGE("%s: set_parms for unconfigure failed", __func__);
2780                pthread_mutex_unlock(&mMutex);
2781                return rc;
2782            }
2783        }
2784
2785        /* get eis information for stream configuration */
2786        cam_is_type_t is_type;
2787        char is_type_value[PROPERTY_VALUE_MAX];
2788        property_get("persist.camera.is_type", is_type_value, "0");
2789        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
2790
2791        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2792            int32_t hal_version = CAM_HAL_V3;
2793            uint8_t captureIntent =
2794                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2795            mCaptureIntent = captureIntent;
2796            clear_metadata_buffer(mParameters);
2797            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
2798            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
2799        }
2800
2801        //If EIS is enabled, turn it on for video
2802        bool setEis = m_bEisEnable && m_bEisSupportedSize;
2803        int32_t vsMode;
2804        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
2805        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
2806            rc = BAD_VALUE;
2807        }
2808
2809        //IS type will be 0 unless EIS is supported. If EIS is supported
2810        //it could either be 1 or 4 depending on the stream and video size
2811        if (setEis) {
2812            if (!m_bEisSupportedSize) {
2813                is_type = IS_TYPE_DIS;
2814            } else {
2815                is_type = IS_TYPE_EIS_2_0;
2816            }
2817            mStreamConfigInfo.is_type = is_type;
2818        } else {
2819            mStreamConfigInfo.is_type = IS_TYPE_NONE;
2820        }
2821
2822        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2823                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
2824        int32_t tintless_value = 1;
2825        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2826                CAM_INTF_PARM_TINTLESS, tintless_value);
2827        //Disable CDS for HFR mode and if mPprocBypass = true.
2828        //CDS is a session parameter in the backend/ISP, so need to be set/reset
2829        //after every configure_stream
2830        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
2831                mPprocBypass) {
2832            int32_t cds = CAM_CDS_MODE_OFF;
2833            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2834                    CAM_INTF_PARM_CDS_MODE, cds))
2835                ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
2836
2837        }
2838        setMobicat();
2839
2840        /* Set fps and hfr mode while sending meta stream info so that sensor
2841         * can configure appropriate streaming mode */
2842        mHFRVideoFps = DEFAULT_VIDEO_FPS;
2843        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2844            rc = setHalFpsRange(meta, mParameters);
2845            if (rc != NO_ERROR) {
2846                ALOGE("%s: setHalFpsRange failed", __func__);
2847            }
2848        }
2849        if (meta.exists(ANDROID_CONTROL_MODE)) {
2850            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
2851            rc = extractSceneMode(meta, metaMode, mParameters);
2852            if (rc != NO_ERROR) {
2853                ALOGE("%s: extractSceneMode failed", __func__);
2854            }
2855        }
2856
2857        //TODO: validate the arguments, HSV scenemode should have only the
2858        //advertised fps ranges
2859
2860        /*set the capture intent, hal version, tintless, stream info,
2861         *and disenable parameters to the backend*/
2862        CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
2863        mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2864                    mParameters);
2865
2866        cam_dimension_t sensor_dim;
2867        memset(&sensor_dim, 0, sizeof(sensor_dim));
2868        rc = getSensorOutputSize(sensor_dim);
2869        if (rc != NO_ERROR) {
2870            ALOGE("%s: Failed to get sensor output size", __func__);
2871            pthread_mutex_unlock(&mMutex);
2872            return rc;
2873        }
2874
2875        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
2876                gCamCapability[mCameraId]->active_array_size.height,
2877                sensor_dim.width, sensor_dim.height);
2878
2879        /* Set batchmode before initializing channel. Since registerBuffer
2880         * internally initializes some of the channels, better set batchmode
2881         * even before first register buffer */
2882        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2883            it != mStreamInfo.end(); it++) {
2884            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2885            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
2886                    && mBatchSize) {
2887                rc = channel->setBatchSize(mBatchSize);
2888                //Disable per frame map unmap for HFR/batchmode case
2889                rc |= channel->setPerFrameMapUnmap(false);
2890                if (NO_ERROR != rc) {
2891                    ALOGE("%s : Channel init failed %d", __func__, rc);
2892                    pthread_mutex_unlock(&mMutex);
2893                    return rc;
2894                }
2895            }
2896        }
2897
2898        for (size_t i = 0; i < request->num_output_buffers; i++) {
2899            const camera3_stream_buffer_t& output = request->output_buffers[i];
2900            QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2901            /*for livesnapshot stream is_type will be DIS*/
2902            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
2903               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
2904               setEis)
2905                rc = channel->registerBuffer(output.buffer, is_type);
2906            else
2907                rc = channel->registerBuffer(output.buffer, IS_TYPE_NONE);
2908
2909            if (rc < 0) {
2910                ALOGE("%s: registerBuffer failed",
2911                        __func__);
2912                pthread_mutex_unlock(&mMutex);
2913                return -ENODEV;
2914            }
2915        }
2916
2917        //First initialize all streams
2918        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2919            it != mStreamInfo.end(); it++) {
2920            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2921            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
2922               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
2923               setEis)
2924                rc = channel->initialize(is_type);
2925            else {
2926                rc = channel->initialize(IS_TYPE_NONE);
2927            }
2928            if (NO_ERROR != rc) {
2929                ALOGE("%s : Channel initialization failed %d", __func__, rc);
2930                pthread_mutex_unlock(&mMutex);
2931                return rc;
2932            }
2933        }
2934
2935        if (mRawDumpChannel) {
2936            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
2937            if (rc != NO_ERROR) {
2938                ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
2939                pthread_mutex_unlock(&mMutex);
2940                return rc;
2941            }
2942        }
2943        if (mSupportChannel) {
2944            rc = mSupportChannel->initialize(IS_TYPE_NONE);
2945            if (rc < 0) {
2946                ALOGE("%s: Support channel initialization failed", __func__);
2947                pthread_mutex_unlock(&mMutex);
2948                return rc;
2949            }
2950        }
2951        if (mAnalysisChannel) {
2952            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
2953            if (rc < 0) {
2954                ALOGE("%s: Analysis channel initialization failed", __func__);
2955                pthread_mutex_unlock(&mMutex);
2956                return rc;
2957            }
2958        }
2959        if (mDummyBatchChannel) {
2960            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
2961            if (rc < 0) {
2962                ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
2963                pthread_mutex_unlock(&mMutex);
2964                return rc;
2965            }
2966            rc = mDummyBatchChannel->initialize(is_type);
2967            if (rc < 0) {
2968                ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
2969                pthread_mutex_unlock(&mMutex);
2970                return rc;
2971            }
2972        }
2973
2974        //Then start them.
2975        CDBG_HIGH("%s: Start META Channel", __func__);
2976        rc = mMetadataChannel->start();
2977        if (rc < 0) {
2978            ALOGE("%s: META channel start failed", __func__);
2979            pthread_mutex_unlock(&mMutex);
2980            return rc;
2981        }
2982
2983        if (mAnalysisChannel) {
2984            rc = mAnalysisChannel->start();
2985            if (rc < 0) {
2986                ALOGE("%s: Analysis channel start failed", __func__);
2987                mMetadataChannel->stop();
2988                pthread_mutex_unlock(&mMutex);
2989                return rc;
2990            }
2991        }
2992
2993        if (mSupportChannel) {
2994            rc = mSupportChannel->start();
2995            if (rc < 0) {
2996                ALOGE("%s: Support channel start failed", __func__);
2997                mMetadataChannel->stop();
2998                /* Although support and analysis are mutually exclusive today
2999                   adding it in anycase for future proofing */
3000                if (mAnalysisChannel) {
3001                    mAnalysisChannel->stop();
3002                }
3003                pthread_mutex_unlock(&mMutex);
3004                return rc;
3005            }
3006        }
3007        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3008            it != mStreamInfo.end(); it++) {
3009            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3010            CDBG_HIGH("%s: Start Processing Channel mask=%d",
3011                    __func__, channel->getStreamTypeMask());
3012            rc = channel->start();
3013            if (rc < 0) {
3014                ALOGE("%s: channel start failed", __func__);
3015                pthread_mutex_unlock(&mMutex);
3016                return rc;
3017            }
3018        }
3019
3020        if (mRawDumpChannel) {
3021            CDBG("%s: Starting raw dump stream",__func__);
3022            rc = mRawDumpChannel->start();
3023            if (rc != NO_ERROR) {
3024                ALOGE("%s: Error Starting Raw Dump Channel", __func__);
3025                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3026                      it != mStreamInfo.end(); it++) {
3027                    QCamera3Channel *channel =
3028                        (QCamera3Channel *)(*it)->stream->priv;
3029                    ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
3030                        channel->getStreamTypeMask());
3031                    channel->stop();
3032                }
3033                if (mSupportChannel)
3034                    mSupportChannel->stop();
3035                if (mAnalysisChannel) {
3036                    mAnalysisChannel->stop();
3037                }
3038                mMetadataChannel->stop();
3039                pthread_mutex_unlock(&mMutex);
3040                return rc;
3041            }
3042        }
3043        mWokenUpByDaemon = false;
3044        mPendingRequest = 0;
3045        mFirstConfiguration = false;
3046    }
3047
3048    uint32_t frameNumber = request->frame_number;
3049    cam_stream_ID_t streamID;
3050
3051    if (meta.exists(ANDROID_REQUEST_ID)) {
3052        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3053        mCurrentRequestId = request_id;
3054        CDBG("%s: Received request with id: %d",__func__, request_id);
3055    } else if (mFirstRequest || mCurrentRequestId == -1){
3056        ALOGE("%s: Unable to find request id field, \
3057                & no previous id available", __func__);
3058        pthread_mutex_unlock(&mMutex);
3059        return NAME_NOT_FOUND;
3060    } else {
3061        CDBG("%s: Re-using old request id", __func__);
3062        request_id = mCurrentRequestId;
3063    }
3064
3065    CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
3066                                    __func__, __LINE__,
3067                                    request->num_output_buffers,
3068                                    request->input_buffer,
3069                                    frameNumber);
3070    // Acquire all request buffers first
3071    streamID.num_streams = 0;
3072    int blob_request = 0;
3073    uint32_t snapshotStreamId = 0;
3074    for (size_t i = 0; i < request->num_output_buffers; i++) {
3075        const camera3_stream_buffer_t& output = request->output_buffers[i];
3076        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3077
3078        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3079            //Call function to store local copy of jpeg data for encode params.
3080            blob_request = 1;
3081            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3082        }
3083
3084        if (output.acquire_fence != -1) {
3085           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3086           close(output.acquire_fence);
3087           if (rc != OK) {
3088              ALOGE("%s: sync wait failed %d", __func__, rc);
3089              pthread_mutex_unlock(&mMutex);
3090              return rc;
3091           }
3092        }
3093
3094        streamID.streamID[streamID.num_streams] =
3095            channel->getStreamID(channel->getStreamTypeMask());
3096        streamID.num_streams++;
3097
3098        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3099            isVidBufRequested = true;
3100        }
3101    }
3102
3103    if (blob_request && mRawDumpChannel) {
3104        CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
3105        streamID.streamID[streamID.num_streams] =
3106            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3107        streamID.num_streams++;
3108    }
3109
3110    if(request->input_buffer == NULL) {
3111        /* Parse the settings:
3112         * - For every request in NORMAL MODE
3113         * - For every request in HFR mode during preview only case
3114         * - For first request of every batch in HFR mode during video
3115         * recording. In batchmode the same settings except frame number is
3116         * repeated in each request of the batch.
3117         */
3118        if (!mBatchSize ||
3119           (mBatchSize && !isVidBufRequested) ||
3120           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3121            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3122            if (rc < 0) {
3123                ALOGE("%s: fail to set frame parameters", __func__);
3124                pthread_mutex_unlock(&mMutex);
3125                return rc;
3126            }
3127        }
3128        /* For batchMode HFR, setFrameParameters is not called for every
3129         * request. But only frame number of the latest request is parsed */
3130        if (mBatchSize && ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3131                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3132            ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3133            return BAD_VALUE;
3134        }
3135        if (mNeedSensorRestart) {
3136            /* Unlock the mutex as restartSensor waits on the channels to be
3137             * stopped, which in turn calls stream callback functions -
3138             * handleBufferWithLock and handleMetadataWithLock */
3139            pthread_mutex_unlock(&mMutex);
3140            rc = dynamicUpdateMetaStreamInfo();
3141            if (rc != NO_ERROR) {
3142                ALOGE("%s: Restarting the sensor failed", __func__);
3143                return BAD_VALUE;
3144            }
3145            mNeedSensorRestart = false;
3146            pthread_mutex_lock(&mMutex);
3147        }
3148    } else {
3149
3150        if (request->input_buffer->acquire_fence != -1) {
3151           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3152           close(request->input_buffer->acquire_fence);
3153           if (rc != OK) {
3154              ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3155              pthread_mutex_unlock(&mMutex);
3156              return rc;
3157           }
3158        }
3159    }
3160
3161    /* Update pending request list and pending buffers map */
3162    PendingRequestInfo pendingRequest;
3163    pendingRequestIterator latestRequest;
3164    pendingRequest.frame_number = frameNumber;
3165    pendingRequest.num_buffers = request->num_output_buffers;
3166    pendingRequest.request_id = request_id;
3167    pendingRequest.blob_request = blob_request;
3168    pendingRequest.timestamp = 0;
3169    pendingRequest.bUrgentReceived = 0;
3170    if (request->input_buffer) {
3171        pendingRequest.input_buffer =
3172                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3173        *(pendingRequest.input_buffer) = *(request->input_buffer);
3174        pInputBuffer = pendingRequest.input_buffer;
3175    } else {
3176       pendingRequest.input_buffer = NULL;
3177       pInputBuffer = NULL;
3178    }
3179    pendingRequest.settings = request->settings;
3180    pendingRequest.pipeline_depth = 0;
3181    pendingRequest.partial_result_cnt = 0;
3182    extractJpegMetadata(pendingRequest.jpegMetadata, request);
3183
3184    //extract capture intent
3185    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3186        mCaptureIntent =
3187                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3188    }
3189    pendingRequest.capture_intent = mCaptureIntent;
3190
3191    for (size_t i = 0; i < request->num_output_buffers; i++) {
3192        RequestedBufferInfo requestedBuf;
3193        memset(&requestedBuf, 0, sizeof(requestedBuf));
3194        requestedBuf.stream = request->output_buffers[i].stream;
3195        requestedBuf.buffer = NULL;
3196        pendingRequest.buffers.push_back(requestedBuf);
3197
3198        // Add to buffer handle the pending buffers list
3199        PendingBufferInfo bufferInfo;
3200        bufferInfo.frame_number = frameNumber;
3201        bufferInfo.buffer = request->output_buffers[i].buffer;
3202        bufferInfo.stream = request->output_buffers[i].stream;
3203        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3204        mPendingBuffersMap.num_buffers++;
3205        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3206        CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3207                __func__, frameNumber, bufferInfo.buffer,
3208                channel->getStreamTypeMask(), bufferInfo.stream->format);
3209    }
3210    latestRequest = mPendingRequestsList.insert(
3211            mPendingRequestsList.end(), pendingRequest);
3212    if(mFlush) {
3213        pthread_mutex_unlock(&mMutex);
3214        return NO_ERROR;
3215    }
3216
3217    // Notify metadata channel we receive a request
3218    mMetadataChannel->request(NULL, frameNumber);
3219
3220    if(request->input_buffer != NULL){
3221        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3222        if (NO_ERROR != rc) {
3223            ALOGE("%s: fail to set reproc parameters", __func__);
3224            pthread_mutex_unlock(&mMutex);
3225            return rc;
3226        }
3227    }
3228
3229    // Call request on other streams
3230    uint32_t streams_need_metadata = 0;
3231    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3232    for (size_t i = 0; i < request->num_output_buffers; i++) {
3233        const camera3_stream_buffer_t& output = request->output_buffers[i];
3234        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3235
3236        if (channel == NULL) {
3237            ALOGE("%s: invalid channel pointer for stream", __func__);
3238            continue;
3239        }
3240
3241        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3242            if(request->input_buffer != NULL){
3243                rc = channel->request(output.buffer, frameNumber,
3244                        pInputBuffer, &mReprocMeta);
3245                if (rc < 0) {
3246                    ALOGE("%s: Fail to request on picture channel", __func__);
3247                    pthread_mutex_unlock(&mMutex);
3248                    return rc;
3249                }
3250            } else {
3251                CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
3252                        __LINE__, output.buffer, frameNumber);
3253                if (!request->settings) {
3254                    rc = channel->request(output.buffer, frameNumber,
3255                            NULL, mPrevParameters);
3256                } else {
3257                    rc = channel->request(output.buffer, frameNumber,
3258                            NULL, mParameters);
3259                }
3260                if (rc < 0) {
3261                    ALOGE("%s: Fail to request on picture channel", __func__);
3262                    pthread_mutex_unlock(&mMutex);
3263                    return rc;
3264                }
3265                pendingBufferIter->need_metadata = true;
3266                streams_need_metadata++;
3267            }
3268        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3269            bool needMetadata = false;
3270            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3271            rc = yuvChannel->request(output.buffer, frameNumber,
3272                    pInputBuffer,
3273                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3274            if (rc < 0) {
3275                ALOGE("%s: Fail to request on YUV channel", __func__);
3276                pthread_mutex_unlock(&mMutex);
3277                return rc;
3278            }
3279            pendingBufferIter->need_metadata = needMetadata;
3280            if (needMetadata)
3281                streams_need_metadata += 1;
3282            CDBG("%s: calling YUV channel request, need_metadata is %d",
3283                    __func__, needMetadata);
3284        } else {
3285            CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3286                __LINE__, output.buffer, frameNumber);
3287            rc = channel->request(output.buffer, frameNumber);
3288            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3289                    && mBatchSize) {
3290                mToBeQueuedVidBufs++;
3291                if (mToBeQueuedVidBufs == mBatchSize) {
3292                    channel->queueBatchBuf();
3293                }
3294            }
3295            if (rc < 0) {
3296                ALOGE("%s: request failed", __func__);
3297                pthread_mutex_unlock(&mMutex);
3298                return rc;
3299            }
3300        }
3301        pendingBufferIter++;
3302    }
3303
3304    //If 2 streams have need_metadata set to true, fail the request, unless
3305    //we copy/reference count the metadata buffer
3306    if (streams_need_metadata > 1) {
3307        //TODO: Return error from here
3308        ALOGE("s: not supporting request in which two streams requires"
3309                " 2 HAL metadata for reprocessing", __func__);
3310    }
3311
3312    if(request->input_buffer == NULL) {
3313        /* Set the parameters to backend:
3314         * - For every request in NORMAL MODE
3315         * - For every request in HFR mode during preview only case
3316         * - Once every batch in HFR mode during video recording
3317         */
3318        if (!mBatchSize ||
3319           (mBatchSize && !isVidBufRequested) ||
3320           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3321            CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3322                    __func__, mBatchSize, isVidBufRequested,
3323                    mToBeQueuedVidBufs);
3324            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3325                    mParameters);
3326            if (rc < 0) {
3327                ALOGE("%s: set_parms failed", __func__);
3328            }
3329            /* reset to zero coz, the batch is queued */
3330            mToBeQueuedVidBufs = 0;
3331        }
3332    }
3333
3334    mFirstRequest = false;
3335    // Added a timed condition wait
3336    struct timespec ts;
3337    uint8_t isValidTimeout = 1;
3338    rc = clock_gettime(CLOCK_REALTIME, &ts);
3339    if (rc < 0) {
3340      isValidTimeout = 0;
3341      ALOGE("%s: Error reading the real time clock!!", __func__);
3342    }
3343    else {
3344      // Make timeout as 5 sec for request to be honored
3345      ts.tv_sec += 5;
3346    }
3347    //Block on conditional variable
3348
3349    mPendingRequest++;
3350    if (mBatchSize) {
3351        /* For HFR, more buffers are dequeued upfront to improve the performance */
3352        minInFlightRequests = (MIN_INFLIGHT_REQUESTS + 1) * mBatchSize;
3353        maxInFlightRequests = MAX_INFLIGHT_REQUESTS * mBatchSize;
3354    }
3355    while (mPendingRequest >= minInFlightRequests) {
3356        if (!isValidTimeout) {
3357            CDBG("%s: Blocking on conditional wait", __func__);
3358            pthread_cond_wait(&mRequestCond, &mMutex);
3359        }
3360        else {
3361            CDBG("%s: Blocking on timed conditional wait", __func__);
3362            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3363            if (rc == ETIMEDOUT) {
3364                rc = -ENODEV;
3365                ALOGE("%s: Unblocked on timeout!!!!", __func__);
3366                break;
3367            }
3368        }
3369        CDBG("%s: Unblocked", __func__);
3370        if (mWokenUpByDaemon) {
3371            mWokenUpByDaemon = false;
3372            if (mPendingRequest < maxInFlightRequests)
3373                break;
3374        }
3375    }
3376    pthread_mutex_unlock(&mMutex);
3377
3378    return rc;
3379}
3380
3381/*===========================================================================
3382 * FUNCTION   : dump
3383 *
3384 * DESCRIPTION:
3385 *
3386 * PARAMETERS :
3387 *
3388 *
3389 * RETURN     :
3390 *==========================================================================*/
3391void QCamera3HardwareInterface::dump(int fd)
3392{
3393    pthread_mutex_lock(&mMutex);
3394    dprintf(fd, "\n Camera HAL3 information Begin \n");
3395
3396    dprintf(fd, "\nNumber of pending requests: %zu \n",
3397        mPendingRequestsList.size());
3398    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3399    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
3400    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3401    for(pendingRequestIterator i = mPendingRequestsList.begin();
3402            i != mPendingRequestsList.end(); i++) {
3403        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
3404        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
3405        i->input_buffer);
3406    }
3407    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
3408                mPendingBuffersMap.num_buffers);
3409    dprintf(fd, "-------+------------------\n");
3410    dprintf(fd, " Frame | Stream type mask \n");
3411    dprintf(fd, "-------+------------------\n");
3412    for(List<PendingBufferInfo>::iterator i =
3413        mPendingBuffersMap.mPendingBufferList.begin();
3414        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
3415        QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
3416        dprintf(fd, " %5d | %11d \n",
3417                i->frame_number, channel->getStreamTypeMask());
3418    }
3419    dprintf(fd, "-------+------------------\n");
3420
3421    dprintf(fd, "\nPending frame drop list: %zu\n",
3422        mPendingFrameDropList.size());
3423    dprintf(fd, "-------+-----------\n");
3424    dprintf(fd, " Frame | Stream ID \n");
3425    dprintf(fd, "-------+-----------\n");
3426    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
3427        i != mPendingFrameDropList.end(); i++) {
3428        dprintf(fd, " %5d | %9d \n",
3429            i->frame_number, i->stream_ID);
3430    }
3431    dprintf(fd, "-------+-----------\n");
3432
3433    dprintf(fd, "\n Camera HAL3 information End \n");
3434
3435    /* use dumpsys media.camera as trigger to send update debug level event */
3436    mUpdateDebugLevel = true;
3437    pthread_mutex_unlock(&mMutex);
3438    return;
3439}
3440
3441/*===========================================================================
3442 * FUNCTION   : flush
3443 *
3444 * DESCRIPTION:
3445 *
3446 * PARAMETERS :
3447 *
3448 *
3449 * RETURN     :
3450 *==========================================================================*/
3451int QCamera3HardwareInterface::flush()
3452{
3453    ATRACE_CALL();
3454    int32_t rc = NO_ERROR;
3455
3456    CDBG("%s: Unblocking Process Capture Request", __func__);
3457    pthread_mutex_lock(&mMutex);
3458    mFlush = true;
3459    pthread_mutex_unlock(&mMutex);
3460
3461    rc = stopAllChannels();
3462    if (rc < 0) {
3463        ALOGE("%s: stopAllChannels failed", __func__);
3464        return rc;
3465    }
3466
3467    // Mutex Lock
3468    pthread_mutex_lock(&mMutex);
3469
3470    // Unblock process_capture_request
3471    mPendingRequest = 0;
3472    pthread_cond_signal(&mRequestCond);
3473
3474    rc = notifyErrorForPendingRequests();
3475    if (rc < 0) {
3476        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
3477        pthread_mutex_unlock(&mMutex);
3478        return rc;
3479    }
3480
3481    mFlush = false;
3482
3483    // Start the Streams/Channels
3484    rc = startAllChannels();
3485    if (rc < 0) {
3486        ALOGE("%s: startAllChannels failed", __func__);
3487        pthread_mutex_unlock(&mMutex);
3488        return rc;
3489    }
3490
3491    pthread_mutex_unlock(&mMutex);
3492
3493    return 0;
3494}
3495
3496/*===========================================================================
3497 * FUNCTION   : captureResultCb
3498 *
3499 * DESCRIPTION: Callback handler for all capture result
3500 *              (streams, as well as metadata)
3501 *
3502 * PARAMETERS :
3503 *   @metadata : metadata information
3504 *   @buffer   : actual gralloc buffer to be returned to frameworks.
3505 *               NULL if metadata.
3506 *
3507 * RETURN     : NONE
3508 *==========================================================================*/
3509void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
3510                camera3_stream_buffer_t *buffer, uint32_t frame_number)
3511{
3512    if (metadata_buf) {
3513        if (mBatchSize) {
3514            handleBatchMetadata(metadata_buf,
3515                    true /* free_and_bufdone_meta_buf */);
3516        } else { /* mBatchSize = 0 */
3517            pthread_mutex_lock(&mMutex);
3518            handleMetadataWithLock(metadata_buf,
3519                    true /* free_and_bufdone_meta_buf */);
3520            pthread_mutex_unlock(&mMutex);
3521        }
3522    } else {
3523        pthread_mutex_lock(&mMutex);
3524        handleBufferWithLock(buffer, frame_number);
3525        pthread_mutex_unlock(&mMutex);
3526    }
3527    return;
3528}
3529
3530/*===========================================================================
3531 * FUNCTION   : getReprocessibleOutputStream
3532 *
3533 * DESCRIPTION: return the output stream corresponding to the supported input
3534 *              reprocess stream size, which would be the largest output stream
3535 *              if an input stream exists
3536 *
3537 * PARAMETERS : NONE
3538 *
3539 * RETURN     :
3540 *    stream_info_t* : pointer to largest output stream
3541 *    NULL if not found
3542 *==========================================================================*/
3543stream_info_t* QCamera3HardwareInterface::getReprocessibleOutputStream()
3544{
3545   /* check if any output or bidirectional stream has the input stream dimensions
3546      and return that stream */
3547   if ((mInputStreamSize.width > 0) && (mInputStreamSize.height > 0)) {
3548       for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3549           it != mStreamInfo.end(); it++) {
3550           if (((*it)->stream->width == (uint32_t)mInputStreamSize.width) &&
3551                   ((*it)->stream->height == (uint32_t)mInputStreamSize.height)) {
3552               CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
3553               return *it;
3554           }
3555       }
3556   } else {
3557       CDBG("%s: No input stream, so no reprocessible output stream", __func__);
3558   }
3559   CDBG("%s: Could not find reprocessible output stream", __func__);
3560   return NULL;
3561}
3562
3563/*===========================================================================
3564 * FUNCTION   : lookupFwkName
3565 *
3566 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
3568 *
3569 * PARAMETERS  :
3570 *   @arr      : map between the two enums
3571 *   @len      : len of the map
3572 *   @hal_name : name of the hal_parm to map
3573 *
3574 * RETURN     : int type of status
3575 *              fwk_name  -- success
 *              non-zero failure code
3577 *==========================================================================*/
3578template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
3579        size_t len, halType hal_name)
3580{
3581
3582    for (size_t i = 0; i < len; i++) {
3583        if (arr[i].hal_name == hal_name) {
3584            return arr[i].fwk_name;
3585        }
3586    }
3587
3588    /* Not able to find matching framework type is not necessarily
3589     * an error case. This happens when mm-camera supports more attributes
3590     * than the frameworks do */
3591    CDBG_HIGH("%s: Cannot find matching framework type", __func__);
3592    return NAME_NOT_FOUND;
3593}
3594
3595/*===========================================================================
3596 * FUNCTION   : lookupHalName
3597 *
3598 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
3600 *
3601 * PARAMETERS  :
3602 *   @arr      : map between the two enums
3603 *   @len      : len of the map
3604 *   @fwk_name : name of the hal_parm to map
3605 *
3606 * RETURN     : int32_t type of status
3607 *              hal_name  -- success
 *              non-zero failure code
3609 *==========================================================================*/
3610template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
3611        size_t len, fwkType fwk_name)
3612{
3613    for (size_t i = 0; i < len; i++) {
3614        if (arr[i].fwk_name == fwk_name) {
3615            return arr[i].hal_name;
3616        }
3617    }
3618
3619    ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
3620    return NAME_NOT_FOUND;
3621}
3622
3623/*===========================================================================
3624 * FUNCTION   : lookupProp
3625 *
3626 * DESCRIPTION: lookup a value by its name
3627 *
3628 * PARAMETERS :
3629 *   @arr     : map between the two enums
3630 *   @len     : size of the map
3631 *   @name    : name to be looked up
3632 *
3633 * RETURN     : Value if found
3634 *              CAM_CDS_MODE_MAX if not found
3635 *==========================================================================*/
3636template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
3637        size_t len, const char *name)
3638{
3639    if (name) {
3640        for (size_t i = 0; i < len; i++) {
3641            if (!strcmp(arr[i].desc, name)) {
3642                return arr[i].val;
3643            }
3644        }
3645    }
3646    return CAM_CDS_MODE_MAX;
3647}
3648
3649/*===========================================================================
3650 *
3651 * DESCRIPTION:
3652 *
3653 * PARAMETERS :
3654 *   @metadata : metadata information from callback
3655 *   @timestamp: metadata buffer timestamp
3656 *   @request_id: request id
3657 *   @jpegMetadata: additional jpeg metadata
3658 *
3659 * RETURN     : camera_metadata_t*
3660 *              metadata in a format specified by fwk
3661 *==========================================================================*/
3662camera_metadata_t*
3663QCamera3HardwareInterface::translateFromHalMetadata(
3664                                 metadata_buffer_t *metadata,
3665                                 nsecs_t timestamp,
3666                                 int32_t request_id,
3667                                 const CameraMetadata& jpegMetadata,
3668                                 uint8_t pipeline_depth,
3669                                 uint8_t capture_intent)
3670{
3671    CameraMetadata camMetadata;
3672    camera_metadata_t *resultMetadata;
3673
3674    if (jpegMetadata.entryCount())
3675        camMetadata.append(jpegMetadata);
3676
3677    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
3678    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
3679    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
3680    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
3681
3682    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
3683        int64_t fwk_frame_number = *frame_number;
3684        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
3685    }
3686
3687    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
3688        int32_t fps_range[2];
3689        fps_range[0] = (int32_t)float_range->min_fps;
3690        fps_range[1] = (int32_t)float_range->max_fps;
3691        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3692                                      fps_range, 2);
3693        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
3694            __func__, fps_range[0], fps_range[1]);
3695    }
3696
3697    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
3698        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
3699    }
3700
3701    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
3702        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
3703                METADATA_MAP_SIZE(SCENE_MODES_MAP),
3704                *sceneMode);
3705        if (NAME_NOT_FOUND != val) {
3706            uint8_t fwkSceneMode = (uint8_t)val;
3707            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
3708            CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
3709                    __func__, fwkSceneMode);
3710        }
3711    }
3712
3713    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
3714        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
3715        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
3716    }
3717
3718    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
3719        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
3720        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
3721    }
3722
3723    IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
3724            CAM_INTF_META_FACE_DETECTION, metadata) {
3725        uint8_t numFaces = MIN(faceDetectionInfo->num_faces_detected, MAX_ROI);
3726        int32_t faceIds[MAX_ROI];
3727        uint8_t faceScores[MAX_ROI];
3728        int32_t faceRectangles[MAX_ROI * 4];
3729        int32_t faceLandmarks[MAX_ROI * 6];
3730        size_t j = 0, k = 0;
3731
3732        for (size_t i = 0; i < numFaces; i++) {
3733            faceIds[i] = faceDetectionInfo->faces[i].face_id;
3734            faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
3735            // Adjust crop region from sensor output coordinate system to active
3736            // array coordinate system.
3737            cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
3738            mCropRegionMapper.toActiveArray(rect.left, rect.top,
3739                    rect.width, rect.height);
3740
3741            convertToRegions(faceDetectionInfo->faces[i].face_boundary,
3742                faceRectangles+j, -1);
3743
3744            // Map the co-ordinate sensor output coordinate system to active
3745            // array coordinate system.
3746            cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
3747            mCropRegionMapper.toActiveArray(face.left_eye_center.x,
3748                    face.left_eye_center.y);
3749            mCropRegionMapper.toActiveArray(face.right_eye_center.x,
3750                    face.right_eye_center.y);
3751            mCropRegionMapper.toActiveArray(face.mouth_center.x,
3752                    face.mouth_center.y);
3753
3754            convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
3755            j+= 4;
3756            k+= 6;
3757        }
3758        if (numFaces <= 0) {
3759            memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
3760            memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
3761            memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
3762            memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
3763        }
3764        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
3765        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
3766        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES, faceRectangles, numFaces * 4U);
3767        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS, faceLandmarks, numFaces * 6U);
3768    }
3769
3770    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
3771        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
3772        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
3773    }
3774
3775    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
3776            CAM_INTF_META_EDGE_MODE, metadata) {
3777        uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
3778        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
3779        camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
3780    }
3781
3782    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
3783        uint8_t fwk_flashPower = (uint8_t) *flashPower;
3784        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
3785    }
3786
3787    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
3788        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
3789    }
3790
3791    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
3792        if (0 <= *flashState) {
3793            uint8_t fwk_flashState = (uint8_t) *flashState;
3794            if (!gCamCapability[mCameraId]->flash_available) {
3795                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
3796            }
3797            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
3798        }
3799    }
3800
3801    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
3802        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
3803        if (NAME_NOT_FOUND != val) {
3804            uint8_t fwk_flashMode = (uint8_t)val;
3805            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
3806        }
3807    }
3808
3809    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
3810        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
3811        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
3812    }
3813
3814    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
3815        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
3816    }
3817
3818    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
3819        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
3820    }
3821
3822    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
3823        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
3824    }
3825
3826    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
3827        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
3828        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
3829    }
3830
3831    /*EIS is currently not hooked up to the app, so set the mode to OFF*/
3832    uint8_t vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3833    camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
3834
3835    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
3836        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
3837        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
3838    }
3839
3840    IF_META_AVAILABLE(uint32_t, noiseRedStrength, CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata) {
3841        uint8_t fwk_noiseRedStrength = (uint8_t) *noiseRedStrength;
3842        camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, &fwk_noiseRedStrength, 1);
3843    }
3844
3845    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
3846        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
3847    }
3848
3849    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelInd, CAM_INTF_META_BLACK_LEVEL_IND, metadata) {
3850        int32_t fwk_blackLevelInd[4];
3851        fwk_blackLevelInd[0] = blackLevelInd->cam_black_level[0];
3852        fwk_blackLevelInd[1] = blackLevelInd->cam_black_level[1];
3853        fwk_blackLevelInd[2] = blackLevelInd->cam_black_level[2];
3854        fwk_blackLevelInd[3] = blackLevelInd->cam_black_level[3];
3855
3856        CDBG("%s: dynamicblackLevel = %d %d %d %d", __func__,
3857            blackLevelInd->cam_black_level[0],
3858            blackLevelInd->cam_black_level[1],
3859            blackLevelInd->cam_black_level[2],
3860            blackLevelInd->cam_black_level[3]);
3861        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
3862    }
3863
3864    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
3865            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
3866        int32_t scalerCropRegion[4];
3867        scalerCropRegion[0] = hScalerCropRegion->left;
3868        scalerCropRegion[1] = hScalerCropRegion->top;
3869        scalerCropRegion[2] = hScalerCropRegion->width;
3870        scalerCropRegion[3] = hScalerCropRegion->height;
3871
3872        // Adjust crop region from sensor output coordinate system to active
3873        // array coordinate system.
3874        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
3875                scalerCropRegion[2], scalerCropRegion[3]);
3876
3877        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
3878    }
3879
3880    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
3881        CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
3882        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
3883    }
3884
3885    IF_META_AVAILABLE(int64_t, sensorFameDuration,
3886            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
3887        CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
3888        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
3889    }
3890
3891    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
3892            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
3893        CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
3894        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
3895                sensorRollingShutterSkew, 1);
3896    }
3897
3898    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
3899        CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
3900        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
3901
3902        //calculate the noise profile based on sensitivity
3903        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
3904        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
3905        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
3906        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
3907            noise_profile[i]   = noise_profile_S;
3908            noise_profile[i+1] = noise_profile_O;
3909        }
3910        CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
3911                noise_profile_S, noise_profile_O);
3912        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
3913                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
3914    }
3915
3916    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
3917        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
3918        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
3919    }
3920
3921    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
3922        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
3923                *faceDetectMode);
3924        if (NAME_NOT_FOUND != val) {
3925            uint8_t fwk_faceDetectMode = (uint8_t)val;
3926            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
3927        }
3928    }
3929
3930    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
3931        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
3932        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
3933    }
3934
3935    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
3936            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
3937        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
3938        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
3939    }
3940
3941    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
3942            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
3943        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
3944                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
3945    }
3946
3947    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
3948            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
3949        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
3950                CAM_MAX_SHADING_MAP_HEIGHT);
3951        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
3952                CAM_MAX_SHADING_MAP_WIDTH);
3953        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
3954                lensShadingMap->lens_shading, 4U * map_width * map_height);
3955    }
3956
3957    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
3958        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
3959        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
3960    }
3961
3962    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
3963        //Populate CAM_INTF_META_TONEMAP_CURVES
3964        /* ch0 = G, ch 1 = B, ch 2 = R*/
3965        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
3966            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
3967                    __func__, tonemap->tonemap_points_cnt,
3968                    CAM_MAX_TONEMAP_CURVE_SIZE);
3969            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
3970        }
3971
3972        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
3973                        &tonemap->curves[0].tonemap_points[0][0],
3974                        tonemap->tonemap_points_cnt * 2);
3975
3976        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
3977                        &tonemap->curves[1].tonemap_points[0][0],
3978                        tonemap->tonemap_points_cnt * 2);
3979
3980        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
3981                        &tonemap->curves[2].tonemap_points[0][0],
3982                        tonemap->tonemap_points_cnt * 2);
3983    }
3984
3985    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
3986            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
3987        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
3988                CC_GAINS_COUNT);
3989    }
3990
3991    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
3992            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
3993        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
3994                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
3995                CC_MATRIX_COLS * CC_MATRIX_ROWS);
3996    }
3997
3998    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
3999            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4000        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4001            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4002                    __func__, toneCurve->tonemap_points_cnt,
4003                    CAM_MAX_TONEMAP_CURVE_SIZE);
4004            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4005        }
4006        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4007                (float*)toneCurve->curve.tonemap_points,
4008                toneCurve->tonemap_points_cnt * 2);
4009    }
4010
4011    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4012            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4013        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4014                predColorCorrectionGains->gains, 4);
4015    }
4016
4017    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4018            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4019        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4020                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4021                CC_MATRIX_ROWS * CC_MATRIX_COLS);
4022    }
4023
4024    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4025        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4026    }
4027
4028    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4029        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4030        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4031    }
4032
4033    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4034        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4035        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4036    }
4037
4038    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4039        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4040                *effectMode);
4041        if (NAME_NOT_FOUND != val) {
4042            uint8_t fwk_effectMode = (uint8_t)val;
4043            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4044        }
4045    }
4046
4047    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4048            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4049        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4050                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4051        if (NAME_NOT_FOUND != fwk_testPatternMode) {
4052            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4053        }
4054        int32_t fwk_testPatternData[4];
4055        fwk_testPatternData[0] = testPatternData->r;
4056        fwk_testPatternData[3] = testPatternData->b;
4057        switch (gCamCapability[mCameraId]->color_arrangement) {
4058        case CAM_FILTER_ARRANGEMENT_RGGB:
4059        case CAM_FILTER_ARRANGEMENT_GRBG:
4060            fwk_testPatternData[1] = testPatternData->gr;
4061            fwk_testPatternData[2] = testPatternData->gb;
4062            break;
4063        case CAM_FILTER_ARRANGEMENT_GBRG:
4064        case CAM_FILTER_ARRANGEMENT_BGGR:
4065            fwk_testPatternData[2] = testPatternData->gr;
4066            fwk_testPatternData[1] = testPatternData->gb;
4067            break;
4068        default:
4069            ALOGE("%s: color arrangement %d is not supported", __func__,
4070                gCamCapability[mCameraId]->color_arrangement);
4071            break;
4072        }
4073        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4074    }
4075
4076    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4077        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4078    }
4079
4080    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4081        String8 str((const char *)gps_methods);
4082        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4083    }
4084
4085    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4086        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4087    }
4088
4089    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4090        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4091    }
4092
4093    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4094        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4095        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4096    }
4097
4098    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4099        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4100        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4101    }
4102
4103    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4104        int32_t fwk_thumb_size[2];
4105        fwk_thumb_size[0] = thumb_size->width;
4106        fwk_thumb_size[1] = thumb_size->height;
4107        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4108    }
4109
4110    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4111        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4112                privateData,
4113                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4114    }
4115
4116    if (metadata->is_tuning_params_valid) {
4117        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4118        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4119        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4120
4121
4122        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4123                sizeof(uint32_t));
4124        data += sizeof(uint32_t);
4125
4126        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
4127                sizeof(uint32_t));
4128        CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4129        data += sizeof(uint32_t);
4130
4131        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4132                sizeof(uint32_t));
4133        CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4134        data += sizeof(uint32_t);
4135
4136        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4137                sizeof(uint32_t));
4138        CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4139        data += sizeof(uint32_t);
4140
4141        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4142                sizeof(uint32_t));
4143        CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4144        data += sizeof(uint32_t);
4145
4146        metadata->tuning_params.tuning_mod3_data_size = 0;
4147        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4148                sizeof(uint32_t));
4149        CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4150        data += sizeof(uint32_t);
4151
4152        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4153                TUNING_SENSOR_DATA_MAX);
4154        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4155                count);
4156        data += count;
4157
4158        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4159                TUNING_VFE_DATA_MAX);
4160        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4161                count);
4162        data += count;
4163
4164        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4165                TUNING_CPP_DATA_MAX);
4166        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4167                count);
4168        data += count;
4169
4170        count = MIN(metadata->tuning_params.tuning_cac_data_size,
4171                TUNING_CAC_DATA_MAX);
4172        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4173                count);
4174        data += count;
4175
4176        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4177                (int32_t *)(void *)tuning_meta_data_blob,
4178                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4179    }
4180
4181    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4182            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4183        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4184                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4185                NEUTRAL_COL_POINTS);
4186    }
4187
4188    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4189        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4190        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4191    }
4192
4193    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4194        int32_t aeRegions[REGIONS_TUPLE_COUNT];
4195        // Adjust crop region from sensor output coordinate system to active
4196        // array coordinate system.
4197        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
4198                hAeRegions->rect.width, hAeRegions->rect.height);
4199
4200        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4201        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4202                REGIONS_TUPLE_COUNT);
4203        CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4204                __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4205                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4206                hAeRegions->rect.height);
4207    }
4208
4209    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4210        /*af regions*/
4211        int32_t afRegions[REGIONS_TUPLE_COUNT];
4212        // Adjust crop region from sensor output coordinate system to active
4213        // array coordinate system.
4214        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
4215                hAfRegions->rect.width, hAfRegions->rect.height);
4216
4217        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4218        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4219                REGIONS_TUPLE_COUNT);
4220        CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4221                __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4222                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4223                hAfRegions->rect.height);
4224    }
4225
4226    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4227        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4228                *hal_ab_mode);
4229        if (NAME_NOT_FOUND != val) {
4230            uint8_t fwk_ab_mode = (uint8_t)val;
4231            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4232        }
4233    }
4234
4235    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4236        int val = lookupFwkName(SCENE_MODES_MAP,
4237                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4238        if (NAME_NOT_FOUND != val) {
4239            uint8_t fwkBestshotMode = (uint8_t)val;
4240            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4241            CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4242        } else {
4243            CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4244        }
4245    }
4246
4247    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4248         uint8_t fwk_mode = (uint8_t) *mode;
4249         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4250    }
4251
4252    /* Constant metadata values to be update*/
4253    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4254    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4255
4256    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4257    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4258
4259    int32_t hotPixelMap[2];
4260    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4261
4262    // CDS
4263    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4264        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
4265    }
4266
4267    // Reprocess crop data
4268    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
4269        uint8_t cnt = crop_data->num_of_streams;
4270        if ((0 < cnt) && (cnt < MAX_NUM_STREAMS)) {
4271            stream_info_t* reprocessible_stream = getReprocessibleOutputStream();
4272            if (NULL == reprocessible_stream) {
4273                CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
4274            } else {
4275                QCamera3Channel *channel = (QCamera3Channel *)reprocessible_stream->stream->priv;
4276                int rc = NO_ERROR;
4277                Vector<int32_t> roi_map;
4278                int32_t *crop = new int32_t[cnt*4];
4279                if (NULL == crop) {
4280                   rc = NO_MEMORY;
4281                }
4282                if (NO_ERROR == rc && NULL != channel) {
4283                    int32_t streams_found = 0;
4284                    uint32_t reprocessible_stream_id = channel->mStreams[0]->getMyServerID();
4285                    for (size_t i = 0; i < cnt; i++) {
4286                        if (crop_data->crop_info[i].stream_id == reprocessible_stream_id) {
4287                            crop[0] = crop_data->crop_info[i].crop.left;
4288                            crop[1] = crop_data->crop_info[i].crop.top;
4289                            crop[2] = crop_data->crop_info[i].crop.width;
4290                            crop[3] = crop_data->crop_info[i].crop.height;
4291                            roi_map.add(crop_data->crop_info[i].roi_map.left);
4292                            roi_map.add(crop_data->crop_info[i].roi_map.top);
4293                            roi_map.add(crop_data->crop_info[i].roi_map.width);
4294                            roi_map.add(crop_data->crop_info[i].roi_map.height);
4295                            streams_found++;
4296                            CDBG("%s: Adding reprocess crop data for stream %p %dx%d, %dx%d",
4297                                    __func__,
4298                                    reprocessible_stream->stream,
4299                                    crop_data->crop_info[i].crop.left,
4300                                    crop_data->crop_info[i].crop.top,
4301                                    crop_data->crop_info[i].crop.width,
4302                                    crop_data->crop_info[i].crop.height);
4303                            CDBG("%s: Adding reprocess crop roi map for stream %p %dx%d, %dx%d",
4304                                    __func__,
4305                                    reprocessible_stream->stream,
4306                                    crop_data->crop_info[i].roi_map.left,
4307                                    crop_data->crop_info[i].roi_map.top,
4308                                    crop_data->crop_info[i].roi_map.width,
4309                                    crop_data->crop_info[i].roi_map.height);
4310                            break;
4311
4312                       }
4313                    }
4314                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
4315                            &streams_found, 1);
4316                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
4317                            crop, (size_t)(streams_found * 4));
4318                    if (roi_map.array()) {
4319                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
4320                                roi_map.array(), roi_map.size());
4321                    }
4322               }
4323               if (crop) {
4324                   delete [] crop;
4325               }
4326            }
4327        } else {
4328            // mm-qcamera-daemon only posts crop_data for streams
4329            // not linked to pproc. So no valid crop metadata is not
4330            // necessarily an error case.
4331            CDBG("%s: No valid crop metadata entries", __func__);
4332        }
4333    }
4334
4335    IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
4336        int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
4337                *cacMode);
4338        if (NAME_NOT_FOUND != val) {
4339            uint8_t fwkCacMode = (uint8_t)val;
4340            camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
4341        } else {
4342            ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
4343        }
4344    }
4345
4346    resultMetadata = camMetadata.release();
4347    return resultMetadata;
4348}
4349
4350/*===========================================================================
4351 * FUNCTION   : saveExifParams
4352 *
4353 * DESCRIPTION:
4354 *
4355 * PARAMETERS :
4356 *   @metadata : metadata information from callback
4357 *
4358 * RETURN     : none
4359 *
4360 *==========================================================================*/
4361void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
4362{
4363    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
4364            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
4365        mExifParams.ae_debug_params = *ae_exif_debug_params;
4366        mExifParams.ae_debug_params_valid = TRUE;
4367    }
4368    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
4369            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
4370        mExifParams.awb_debug_params = *awb_exif_debug_params;
4371        mExifParams.awb_debug_params_valid = TRUE;
4372    }
4373    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
4374            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
4375        mExifParams.af_debug_params = *af_exif_debug_params;
4376        mExifParams.af_debug_params_valid = TRUE;
4377    }
4378    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
4379            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
4380        mExifParams.asd_debug_params = *asd_exif_debug_params;
4381        mExifParams.asd_debug_params_valid = TRUE;
4382    }
4383    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
4384            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
4385        mExifParams.stats_debug_params = *stats_exif_debug_params;
4386        mExifParams.stats_debug_params_valid = TRUE;
4387    }
4388}
4389
4390/*===========================================================================
4391 * FUNCTION   : get3AExifParams
4392 *
4393 * DESCRIPTION:
4394 *
4395 * PARAMETERS : none
4396 *
4397 *
4398 * RETURN     : mm_jpeg_exif_params_t
4399 *
4400 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns (by value) the EXIF debug parameters most recently cached by
    // saveExifParams() from metadata callbacks.
    return mExifParams;
}
4405
4406/*===========================================================================
4407 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
4408 *
4409 * DESCRIPTION:
4410 *
4411 * PARAMETERS :
4412 *   @metadata : metadata information from callback
4413 *
4414 * RETURN     : camera_metadata_t*
4415 *              metadata in a format specified by fwk
4416 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    // Each block below copies one 3A-related field from the HAL metadata
    // buffer into the framework result, translating HAL enums to the
    // corresponding ANDROID_* values where needed.  A field that is absent
    // from the buffer is simply not published.
    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
        uint8_t fwk_afState = (uint8_t) *afState;
        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
    }

    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
    }

    // Focus range is a pair of floats (near, far) per the framework tag.
    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
    }

    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
    }

    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                __func__, aecTrigger->trigger);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
                aecTrigger->trigger_id);
    }

    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
    }

    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d", __func__,
                    val);
        }
    }

    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                __func__, af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
                af_trigger->trigger_id);
    }

    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
        }
    }

    // ANDROID_CONTROL_AE_MODE has no single HAL counterpart; it is deduced
    // from three fields with this priority:
    //   1. red-eye reduction enabled      -> ON_AUTO_FLASH_REDEYE
    //   2. flash mode AUTO or ON          -> mapped via AE_FLASH_MODE_MAP
    //   3. plain AE on/off state          -> AE_MODE_ON / AE_MODE_OFF
    // If none of the three fields yields a conclusion, no AE mode is
    // published and an error is logged.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                __func__, redeye, flashMode, aeMode);
    }

    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
        uint8_t fwk_lensState = *lensState;
        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
    }

    // Transfer ownership of the assembled metadata buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
4541
4542/*===========================================================================
4543 * FUNCTION   : dumpMetadataToFile
4544 *
4545 * DESCRIPTION: Dumps tuning metadata to file system
4546 *
4547 * PARAMETERS :
4548 *   @meta           : tuning metadata
4549 *   @dumpFrameCount : current dump frame count
4550 *   @enabled        : Enable mask
4551 *
4552 *==========================================================================*/
4553void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
4554                                                   uint32_t &dumpFrameCount,
4555                                                   bool enabled,
4556                                                   const char *type,
4557                                                   uint32_t frameNumber)
4558{
4559    uint32_t frm_num = 0;
4560
4561    //Some sanity checks
4562    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
4563        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
4564              __func__,
4565              meta.tuning_sensor_data_size,
4566              TUNING_SENSOR_DATA_MAX);
4567        return;
4568    }
4569
4570    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
4571        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
4572              __func__,
4573              meta.tuning_vfe_data_size,
4574              TUNING_VFE_DATA_MAX);
4575        return;
4576    }
4577
4578    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
4579        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
4580              __func__,
4581              meta.tuning_cpp_data_size,
4582              TUNING_CPP_DATA_MAX);
4583        return;
4584    }
4585
4586    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
4587        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
4588              __func__,
4589              meta.tuning_cac_data_size,
4590              TUNING_CAC_DATA_MAX);
4591        return;
4592    }
4593    //
4594
4595    if(enabled){
4596        char timeBuf[FILENAME_MAX];
4597        char buf[FILENAME_MAX];
4598        memset(buf, 0, sizeof(buf));
4599        memset(timeBuf, 0, sizeof(timeBuf));
4600        time_t current_time;
4601        struct tm * timeinfo;
4602        time (&current_time);
4603        timeinfo = localtime (&current_time);
4604        if (timeinfo != NULL) {
4605            strftime (timeBuf, sizeof(timeBuf),
4606                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
4607        }
4608        String8 filePath(timeBuf);
4609        snprintf(buf,
4610                sizeof(buf),
4611                "%dm_%s_%d.bin",
4612                dumpFrameCount,
4613                type,
4614                frameNumber);
4615        filePath.append(buf);
4616        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
4617        if (file_fd >= 0) {
4618            ssize_t written_len = 0;
4619            meta.tuning_data_version = TUNING_DATA_VERSION;
4620            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
4621            written_len += write(file_fd, data, sizeof(uint32_t));
4622            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
4623            CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4624            written_len += write(file_fd, data, sizeof(uint32_t));
4625            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
4626            CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4627            written_len += write(file_fd, data, sizeof(uint32_t));
4628            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
4629            CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4630            written_len += write(file_fd, data, sizeof(uint32_t));
4631            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
4632            CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4633            written_len += write(file_fd, data, sizeof(uint32_t));
4634            meta.tuning_mod3_data_size = 0;
4635            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
4636            CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4637            written_len += write(file_fd, data, sizeof(uint32_t));
4638            size_t total_size = meta.tuning_sensor_data_size;
4639            data = (void *)((uint8_t *)&meta.data);
4640            written_len += write(file_fd, data, total_size);
4641            total_size = meta.tuning_vfe_data_size;
4642            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
4643            written_len += write(file_fd, data, total_size);
4644            total_size = meta.tuning_cpp_data_size;
4645            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
4646            written_len += write(file_fd, data, total_size);
4647            total_size = meta.tuning_cac_data_size;
4648            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
4649            written_len += write(file_fd, data, total_size);
4650            close(file_fd);
4651        }else {
4652            ALOGE("%s: fail to open file for metadata dumping", __func__);
4653        }
4654    }
4655}
4656
4657/*===========================================================================
4658 * FUNCTION   : cleanAndSortStreamInfo
4659 *
4660 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
4661 *              and sort them such that raw stream is at the end of the list
4662 *              This is a workaround for camera daemon constraint.
4663 *
4664 * PARAMETERS : None
4665 *
4666 *==========================================================================*/
4667void QCamera3HardwareInterface::cleanAndSortStreamInfo()
4668{
4669    List<stream_info_t *> newStreamInfo;
4670
4671    /*clean up invalid streams*/
4672    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
4673            it != mStreamInfo.end();) {
4674        if(((*it)->status) == INVALID){
4675            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
4676            delete channel;
4677            free(*it);
4678            it = mStreamInfo.erase(it);
4679        } else {
4680            it++;
4681        }
4682    }
4683
4684    // Move preview/video/callback/snapshot streams into newList
4685    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4686            it != mStreamInfo.end();) {
4687        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
4688                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
4689                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
4690            newStreamInfo.push_back(*it);
4691            it = mStreamInfo.erase(it);
4692        } else
4693            it++;
4694    }
4695    // Move raw streams into newList
4696    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4697            it != mStreamInfo.end();) {
4698        newStreamInfo.push_back(*it);
4699        it = mStreamInfo.erase(it);
4700    }
4701
4702    mStreamInfo = newStreamInfo;
4703}
4704
4705/*===========================================================================
4706 * FUNCTION   : extractJpegMetadata
4707 *
4708 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
4709 *              JPEG metadata is cached in HAL, and return as part of capture
4710 *              result when metadata is returned from camera daemon.
4711 *
4712 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
4713 *              @request:      capture request
4714 *
4715 *==========================================================================*/
4716void QCamera3HardwareInterface::extractJpegMetadata(
4717        CameraMetadata& jpegMetadata,
4718        const camera3_capture_request_t *request)
4719{
4720    CameraMetadata frame_settings;
4721    frame_settings = request->settings;
4722
4723    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
4724        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
4725                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
4726                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
4727
4728    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
4729        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
4730                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
4731                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
4732
4733    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
4734        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
4735                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
4736                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
4737
4738    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
4739        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
4740                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
4741                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
4742
4743    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
4744        jpegMetadata.update(ANDROID_JPEG_QUALITY,
4745                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
4746                frame_settings.find(ANDROID_JPEG_QUALITY).count);
4747
4748    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
4749        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
4750                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
4751                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
4752
4753    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
4754        int32_t thumbnail_size[2];
4755        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
4756        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
4757        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
4758            int32_t orientation =
4759                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
4760            if ((orientation == 90) || (orientation == 270)) {
4761               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
4762               int32_t temp;
4763               temp = thumbnail_size[0];
4764               thumbnail_size[0] = thumbnail_size[1];
4765               thumbnail_size[1] = temp;
4766            }
4767         }
4768         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
4769                thumbnail_size,
4770                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
4771    }
4772
4773}
4774
4775/*===========================================================================
4776 * FUNCTION   : convertToRegions
4777 *
4778 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
4779 *
4780 * PARAMETERS :
4781 *   @rect   : cam_rect_t struct to convert
4782 *   @region : int32_t destination array
4783 *   @weight : if we are converting from cam_area_t, weight is valid
4784 *             else weight = -1
4785 *
4786 *==========================================================================*/
4787void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
4788        int32_t *region, int weight)
4789{
4790    region[0] = rect.left;
4791    region[1] = rect.top;
4792    region[2] = rect.left + rect.width;
4793    region[3] = rect.top + rect.height;
4794    if (weight > -1) {
4795        region[4] = weight;
4796    }
4797}
4798
4799/*===========================================================================
4800 * FUNCTION   : convertFromRegions
4801 *
4802 * DESCRIPTION: helper method to convert from array to cam_rect_t
4803 *
4804 * PARAMETERS :
4805 *   @rect   : cam_rect_t struct to convert
4806 *   @region : int32_t destination array
4807 *   @weight : if we are converting from cam_area_t, weight is valid
4808 *             else weight = -1
4809 *
4810 *==========================================================================*/
4811void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
4812        const camera_metadata_t *settings, uint32_t tag)
4813{
4814    CameraMetadata frame_settings;
4815    frame_settings = settings;
4816    int32_t x_min = frame_settings.find(tag).data.i32[0];
4817    int32_t y_min = frame_settings.find(tag).data.i32[1];
4818    int32_t x_max = frame_settings.find(tag).data.i32[2];
4819    int32_t y_max = frame_settings.find(tag).data.i32[3];
4820    roi.weight = frame_settings.find(tag).data.i32[4];
4821    roi.rect.left = x_min;
4822    roi.rect.top = y_min;
4823    roi.rect.width = x_max - x_min;
4824    roi.rect.height = y_max - y_min;
4825}
4826
4827/*===========================================================================
4828 * FUNCTION   : resetIfNeededROI
4829 *
4830 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
4831 *              crop region
4832 *
4833 * PARAMETERS :
4834 *   @roi       : cam_area_t struct to resize
4835 *   @scalerCropRegion : cam_crop_region_t region to compare against
4836 *
4837 *
4838 *==========================================================================*/
bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
                                                 const cam_crop_region_t* scalerCropRegion)
{
    // Returns false when the roi lies completely outside the scaler crop
    // region (caller should reset/ignore it); otherwise clamps the roi to
    // the crop region in place and returns true.
    int32_t roi_x_max = roi->rect.width + roi->rect.left;
    int32_t roi_y_max = roi->rect.height + roi->rect.top;
    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;

    /* According to spec weight = 0 is used to indicate roi needs to be disabled
     * without having this check the calculations below to validate if the roi
     * is inside scalar crop region will fail resulting in the roi not being
     * reset causing algorithm to continue to use stale roi window
     */
    if (roi->weight == 0) {
        return true;
    }

    // The roi is entirely outside the crop region if any of these hold:
    if ((roi_x_max < scalerCropRegion->left) ||
        // right edge of roi window is left of scalar crop's left edge
        (roi_y_max < scalerCropRegion->top)  ||
        // bottom edge of roi window is above scalar crop's top edge
        (roi->rect.left > crop_x_max) ||
        // left edge of roi window is beyond(right) of scalar crop's right edge
        (roi->rect.top > crop_y_max)){
        // top edge of roi window is below scalar crop's bottom edge
        return false;
    }
    // Clamp each roi edge to the crop region, then recompute width/height
    // from the clamped corners.
    if (roi->rect.left < scalerCropRegion->left) {
        roi->rect.left = scalerCropRegion->left;
    }
    if (roi->rect.top < scalerCropRegion->top) {
        roi->rect.top = scalerCropRegion->top;
    }
    if (roi_x_max > crop_x_max) {
        roi_x_max = crop_x_max;
    }
    if (roi_y_max > crop_y_max) {
        roi_y_max = crop_y_max;
    }
    roi->rect.width = roi_x_max - roi->rect.left;
    roi->rect.height = roi_y_max - roi->rect.top;
    return true;
}
4882
4883/*===========================================================================
4884 * FUNCTION   : convertLandmarks
4885 *
4886 * DESCRIPTION: helper method to extract the landmarks from face detection info
4887 *
4888 * PARAMETERS :
4889 *   @face   : cam_rect_t struct to convert
4890 *   @landmarks : int32_t destination array
4891 *
4892 *
4893 *==========================================================================*/
4894void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
4895{
4896    landmarks[0] = (int32_t)face.left_eye_center.x;
4897    landmarks[1] = (int32_t)face.left_eye_center.y;
4898    landmarks[2] = (int32_t)face.right_eye_center.x;
4899    landmarks[3] = (int32_t)face.right_eye_center.y;
4900    landmarks[4] = (int32_t)face.mouth_center.x;
4901    landmarks[5] = (int32_t)face.mouth_center.y;
4902}
4903
4904#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
4905/*===========================================================================
4906 * FUNCTION   : initCapabilities
4907 *
4908 * DESCRIPTION: initialize camera capabilities in static data struct
4909 *
4910 * PARAMETERS :
4911 *   @cameraId  : camera Id
4912 *
4913 * RETURN     : int32_t type of status
4914 *              NO_ERROR  -- success
4915 *              none-zero failure code
4916 *==========================================================================*/
4917int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
4918{
4919    int rc = 0;
4920    mm_camera_vtbl_t *cameraHandle = NULL;
4921    QCamera3HeapMemory *capabilityHeap = NULL;
4922
4923    rc = camera_open((uint8_t)cameraId, &cameraHandle);
4924    if (rc || !cameraHandle) {
4925        ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
4926        goto open_failed;
4927    }
4928
4929    capabilityHeap = new QCamera3HeapMemory();
4930    if (capabilityHeap == NULL) {
4931        ALOGE("%s: creation of capabilityHeap failed", __func__);
4932        goto heap_creation_failed;
4933    }
4934    /* Allocate memory for capability buffer */
4935    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
4936    if(rc != OK) {
4937        ALOGE("%s: No memory for cappability", __func__);
4938        goto allocate_failed;
4939    }
4940
4941    /* Map memory for capability buffer */
4942    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
4943    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
4944                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
4945                                capabilityHeap->getFd(0),
4946                                sizeof(cam_capability_t));
4947    if(rc < 0) {
4948        ALOGE("%s: failed to map capability buffer", __func__);
4949        goto map_failed;
4950    }
4951
4952    /* Query Capability */
4953    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
4954    if(rc < 0) {
4955        ALOGE("%s: failed to query capability",__func__);
4956        goto query_failed;
4957    }
4958    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
4959    if (!gCamCapability[cameraId]) {
4960        ALOGE("%s: out of memory", __func__);
4961        goto query_failed;
4962    }
4963    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
4964                                        sizeof(cam_capability_t));
4965    rc = 0;
4966
4967query_failed:
4968    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
4969                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
4970map_failed:
4971    capabilityHeap->deallocate();
4972allocate_failed:
4973    delete capabilityHeap;
4974heap_creation_failed:
4975    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
4976    cameraHandle = NULL;
4977open_failed:
4978    return rc;
4979}
4980
4981/*===========================================================================
4982 * FUNCTION   : initParameters
4983 *
4984 * DESCRIPTION: initialize camera parameters
4985 *
4986 * PARAMETERS :
4987 *
4988 * RETURN     : int32_t type of status
4989 *              NO_ERROR  -- success
4990 *              none-zero failure code
4991 *==========================================================================*/
4992int QCamera3HardwareInterface::initParameters()
4993{
4994    int rc = 0;
4995
4996    //Allocate Set Param Buffer
4997    mParamHeap = new QCamera3HeapMemory();
4998    rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
4999    if(rc != OK) {
5000        rc = NO_MEMORY;
5001        ALOGE("Failed to allocate SETPARM Heap memory");
5002        delete mParamHeap;
5003        mParamHeap = NULL;
5004        return rc;
5005    }
5006
5007    //Map memory for parameters buffer
5008    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5009            CAM_MAPPING_BUF_TYPE_PARM_BUF,
5010            mParamHeap->getFd(0),
5011            sizeof(metadata_buffer_t));
5012    if(rc < 0) {
5013        ALOGE("%s:failed to map SETPARM buffer",__func__);
5014        rc = FAILED_TRANSACTION;
5015        mParamHeap->deallocate();
5016        delete mParamHeap;
5017        mParamHeap = NULL;
5018        return rc;
5019    }
5020
5021    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5022
5023    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5024    return rc;
5025}
5026
5027/*===========================================================================
5028 * FUNCTION   : deinitParameters
5029 *
5030 * DESCRIPTION: de-initialize camera parameters
5031 *
5032 * PARAMETERS :
5033 *
5034 * RETURN     : NONE
5035 *==========================================================================*/
5036void QCamera3HardwareInterface::deinitParameters()
5037{
5038    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
5039            CAM_MAPPING_BUF_TYPE_PARM_BUF);
5040
5041    mParamHeap->deallocate();
5042    delete mParamHeap;
5043    mParamHeap = NULL;
5044
5045    mParameters = NULL;
5046
5047    free(mPrevParameters);
5048    mPrevParameters = NULL;
5049}
5050
5051/*===========================================================================
5052 * FUNCTION   : calcMaxJpegSize
5053 *
5054 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
5055 *
5056 * PARAMETERS :
5057 *
5058 * RETURN     : max_jpeg_size
5059 *==========================================================================*/
5060size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
5061{
5062    size_t max_jpeg_size = 0;
5063    size_t temp_width, temp_height;
5064    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
5065            MAX_SIZES_CNT);
5066    for (size_t i = 0; i < count; i++) {
5067        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
5068        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
5069        if (temp_width * temp_height > max_jpeg_size ) {
5070            max_jpeg_size = temp_width * temp_height;
5071        }
5072    }
5073    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
5074    return max_jpeg_size;
5075}
5076
5077/*===========================================================================
5078 * FUNCTION   : getMaxRawSize
5079 *
5080 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
5081 *
5082 * PARAMETERS :
5083 *
5084 * RETURN     : Largest supported Raw Dimension
5085 *==========================================================================*/
5086cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
5087{
5088    int max_width = 0;
5089    cam_dimension_t maxRawSize;
5090
5091    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
5092    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
5093        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
5094            max_width = gCamCapability[camera_id]->raw_dim[i].width;
5095            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
5096        }
5097    }
5098    return maxRawSize;
5099}
5100
5101
5102/*===========================================================================
5103 * FUNCTION   : calcMaxJpegDim
5104 *
5105 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
5106 *
5107 * PARAMETERS :
5108 *
5109 * RETURN     : max_jpeg_dim
5110 *==========================================================================*/
5111cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
5112{
5113    cam_dimension_t max_jpeg_dim;
5114    cam_dimension_t curr_jpeg_dim;
5115    max_jpeg_dim.width = 0;
5116    max_jpeg_dim.height = 0;
5117    curr_jpeg_dim.width = 0;
5118    curr_jpeg_dim.height = 0;
5119    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
5120        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
5121        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
5122        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
5123            max_jpeg_dim.width * max_jpeg_dim.height ) {
5124            max_jpeg_dim.width = curr_jpeg_dim.width;
5125            max_jpeg_dim.height = curr_jpeg_dim.height;
5126        }
5127    }
5128    return max_jpeg_dim;
5129}
5130
5131/*===========================================================================
5132 * FUNCTION   : addStreamConfig
5133 *
5134 * DESCRIPTION: adds the stream configuration to the array
5135 *
5136 * PARAMETERS :
5137 * @available_stream_configs : pointer to stream configuration array
5138 * @scalar_format            : scalar format
5139 * @dim                      : configuration dimension
5140 * @config_type              : input or output configuration type
5141 *
5142 * RETURN     : NONE
5143 *==========================================================================*/
5144void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
5145        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
5146{
5147    available_stream_configs.add(scalar_format);
5148    available_stream_configs.add(dim.width);
5149    available_stream_configs.add(dim.height);
5150    available_stream_configs.add(config_type);
5151}
5152
5153
5154/*===========================================================================
5155 * FUNCTION   : initStaticMetadata
5156 *
5157 * DESCRIPTION: initialize the static metadata
5158 *
5159 * PARAMETERS :
5160 *   @cameraId  : camera Id
5161 *
5162 * RETURN     : int32_t type of status
5163 *              0  -- success
5164 *              non-zero failure code
5165 *==========================================================================*/
5166int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5167{
5168    int rc = 0;
5169    CameraMetadata staticInfo;
5170    size_t count = 0;
5171    bool limitedDevice = false;
5172
5173    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5174     * guaranteed, its advertised as limited device */
5175    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5176            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5177
5178    uint8_t supportedHwLvl = limitedDevice ?
5179            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5180            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
5181
5182    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5183            &supportedHwLvl, 1);
5184
5185    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5186    /*HAL 3 only*/
5187    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5188                    &gCamCapability[cameraId]->min_focus_distance, 1);
5189
5190    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5191                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
5192
5193    /*should be using focal lengths but sensor doesn't provide that info now*/
5194    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5195                      &gCamCapability[cameraId]->focal_length,
5196                      1);
5197
5198    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5199                      gCamCapability[cameraId]->apertures,
5200                      gCamCapability[cameraId]->apertures_count);
5201
5202    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5203                gCamCapability[cameraId]->filter_densities,
5204                gCamCapability[cameraId]->filter_densities_count);
5205
5206
5207    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5208                      (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
5209                      gCamCapability[cameraId]->optical_stab_modes_count);
5210
5211    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
5212            gCamCapability[cameraId]->lens_shading_map_size.height};
5213    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
5214                      lens_shading_map_size,
5215                      sizeof(lens_shading_map_size)/sizeof(int32_t));
5216
5217    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
5218            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
5219
5220    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
5221            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
5222
5223    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5224            &gCamCapability[cameraId]->max_frame_duration, 1);
5225
5226    camera_metadata_rational baseGainFactor = {
5227            gCamCapability[cameraId]->base_gain_factor.numerator,
5228            gCamCapability[cameraId]->base_gain_factor.denominator};
5229    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
5230                      &baseGainFactor, 1);
5231
5232    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5233                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
5234
5235    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
5236            gCamCapability[cameraId]->pixel_array_size.height};
5237    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5238                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
5239
5240    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
5241                                                gCamCapability[cameraId]->active_array_size.top,
5242                                                gCamCapability[cameraId]->active_array_size.width,
5243                                                gCamCapability[cameraId]->active_array_size.height};
5244    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5245                      active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
5246
5247    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
5248            &gCamCapability[cameraId]->white_level, 1);
5249
5250    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
5251            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
5252
5253    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
5254                      &gCamCapability[cameraId]->flash_charge_duration, 1);
5255
5256    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
5257                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
5258
5259    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
5260    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
5261                      (int32_t *)&maxFaces, 1);
5262
5263    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
5264    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
5265            &timestampSource, 1);
5266
5267    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
5268                      &gCamCapability[cameraId]->histogram_size, 1);
5269
5270    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
5271            &gCamCapability[cameraId]->max_histogram_count, 1);
5272
5273    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
5274            gCamCapability[cameraId]->sharpness_map_size.height};
5275
5276    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
5277            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
5278
5279    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
5280            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
5281
5282    int32_t scalar_formats[] = {
5283            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
5284            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
5285            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
5286            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
5287            HAL_PIXEL_FORMAT_RAW10,
5288            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
5289    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
5290    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
5291                      scalar_formats,
5292                      scalar_formats_count);
5293
5294    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
5295    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5296    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
5297            count, MAX_SIZES_CNT, available_processed_sizes);
5298    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
5299            available_processed_sizes, count * 2);
5300
5301    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
5302    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
5303    makeTable(gCamCapability[cameraId]->raw_dim,
5304            count, MAX_SIZES_CNT, available_raw_sizes);
5305    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
5306            available_raw_sizes, count * 2);
5307
5308    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
5309    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
5310    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
5311            count, MAX_SIZES_CNT, available_fps_ranges);
5312    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
5313            available_fps_ranges, count * 2);
5314
5315    camera_metadata_rational exposureCompensationStep = {
5316            gCamCapability[cameraId]->exp_compensation_step.numerator,
5317            gCamCapability[cameraId]->exp_compensation_step.denominator};
5318    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
5319                      &exposureCompensationStep, 1);
5320
5321    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
5322    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
5323                      availableVstabModes, sizeof(availableVstabModes));
5324
5325    /*HAL 1 and HAL 3 common*/
5326    float maxZoom = 4;
5327    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
5328            &maxZoom, 1);
5329
5330    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
5331    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
5332
5333    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
5334    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
5335        max3aRegions[2] = 0; /* AF not supported */
5336    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
5337            max3aRegions, 3);
5338
5339    uint8_t availableFaceDetectModes[] = {
5340            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
5341            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
5342    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
5343            availableFaceDetectModes,
5344            sizeof(availableFaceDetectModes)/sizeof(availableFaceDetectModes[0]));
5345
5346    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
5347                                           gCamCapability[cameraId]->exposure_compensation_max};
5348    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
5349            exposureCompensationRange,
5350            sizeof(exposureCompensationRange)/sizeof(int32_t));
5351
5352    uint8_t lensFacing = (facingBack) ?
5353            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
5354    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
5355
5356    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
5357                      available_thumbnail_sizes,
5358                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
5359
5360    /*all sizes will be clubbed into this tag*/
5361    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
5362    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5363    size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
5364            count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
5365            gCamCapability[cameraId]->max_downscale_factor);
5366    /*android.scaler.availableStreamConfigurations*/
5367    size_t max_stream_configs_size = count * scalar_formats_count * 4;
5368    Vector<int32_t> available_stream_configs;
5369    cam_dimension_t active_array_dim;
5370    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
5371    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
5372    /* Add input/output stream configurations for each scalar formats*/
5373    for (size_t j = 0; j < scalar_formats_count; j++) {
5374        switch (scalar_formats[j]) {
5375        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5376        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5377        case HAL_PIXEL_FORMAT_RAW10:
5378            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5379                addStreamConfig(available_stream_configs, scalar_formats[j],
5380                        gCamCapability[cameraId]->raw_dim[i],
5381                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5382            }
5383            break;
5384        case HAL_PIXEL_FORMAT_BLOB:
5385            cam_dimension_t jpeg_size;
5386            for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
5387                jpeg_size.width  = available_jpeg_sizes[i*2];
5388                jpeg_size.height = available_jpeg_sizes[i*2+1];
5389                addStreamConfig(available_stream_configs, scalar_formats[j],
5390                        jpeg_size,
5391                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5392            }
5393            break;
5394        case HAL_PIXEL_FORMAT_YCbCr_420_888:
5395        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
5396        default:
5397            cam_dimension_t largest_picture_size;
5398            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
5399            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5400                addStreamConfig(available_stream_configs, scalar_formats[j],
5401                        gCamCapability[cameraId]->picture_sizes_tbl[i],
5402                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5403                /* Book keep largest */
5404                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
5405                        >= largest_picture_size.width &&
5406                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
5407                        >= largest_picture_size.height)
5408                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
5409            }
5410            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
5411            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
5412                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5413                 addStreamConfig(available_stream_configs, scalar_formats[j],
5414                         largest_picture_size,
5415                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
5416            }
5417            break;
5418        }
5419    }
5420
5421    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
5422                      available_stream_configs.array(), available_stream_configs.size());
5423    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
5424    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
5425
5426    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5427    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5428
5429    /* android.scaler.availableMinFrameDurations */
5430    int64_t available_min_durations[max_stream_configs_size];
5431    size_t idx = 0;
5432    for (size_t j = 0; j < scalar_formats_count; j++) {
5433        switch (scalar_formats[j]) {
5434        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5435        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5436        case HAL_PIXEL_FORMAT_RAW10:
5437            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5438                available_min_durations[idx] = scalar_formats[j];
5439                available_min_durations[idx+1] =
5440                    gCamCapability[cameraId]->raw_dim[i].width;
5441                available_min_durations[idx+2] =
5442                    gCamCapability[cameraId]->raw_dim[i].height;
5443                available_min_durations[idx+3] =
5444                    gCamCapability[cameraId]->raw_min_duration[i];
5445                idx+=4;
5446            }
5447            break;
5448        default:
5449            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5450                available_min_durations[idx] = scalar_formats[j];
5451                available_min_durations[idx+1] =
5452                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
5453                available_min_durations[idx+2] =
5454                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
5455                available_min_durations[idx+3] =
5456                    gCamCapability[cameraId]->picture_min_duration[i];
5457                idx+=4;
5458            }
5459            break;
5460        }
5461    }
5462    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
5463                      &available_min_durations[0], idx);
5464
5465    Vector<int32_t> available_hfr_configs;
5466    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
5467        int32_t fps = 0;
5468        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
5469        case CAM_HFR_MODE_60FPS:
5470            fps = 60;
5471            break;
5472        case CAM_HFR_MODE_90FPS:
5473            fps = 90;
5474            break;
5475        case CAM_HFR_MODE_120FPS:
5476            fps = 120;
5477            break;
5478        case CAM_HFR_MODE_150FPS:
5479            fps = 150;
5480            break;
5481        case CAM_HFR_MODE_180FPS:
5482            fps = 180;
5483            break;
5484        case CAM_HFR_MODE_210FPS:
5485            fps = 210;
5486            break;
5487        case CAM_HFR_MODE_240FPS:
5488            fps = 240;
5489            break;
5490        case CAM_HFR_MODE_480FPS:
5491            fps = 480;
5492            break;
5493        case CAM_HFR_MODE_OFF:
5494        case CAM_HFR_MODE_MAX:
5495        default:
5496            break;
5497        }
5498
5499        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
5500        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
5501            /* For each HFR frame rate, need to advertise one variable fps range
5502             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
5503             * [120, 120]. While camcorder preview alone is running [30, 120] is
5504             * set by the app. When video recording is started, [120, 120] is
5505             * set. This way sensor configuration does not change when recording
5506             * is started */
5507
5508            /* (width, height, fps_min, fps_max, batch_size_max) */
5509            available_hfr_configs.add(
5510                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
5511            available_hfr_configs.add(
5512                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
5513            available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
5514            available_hfr_configs.add(fps);
5515            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
5516
5517            /* (width, height, fps_min, fps_max, batch_size_max) */
5518            available_hfr_configs.add(
5519                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
5520            available_hfr_configs.add(
5521                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
5522            available_hfr_configs.add(fps);
5523            available_hfr_configs.add(fps);
5524            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
5525       }
5526    }
5527    //Advertise HFR capability only if the property is set
5528    char prop[PROPERTY_VALUE_MAX];
5529    memset(prop, 0, sizeof(prop));
5530    property_get("persist.camera.hal3hfr.enable", prop, "1");
5531    uint8_t hfrEnable = (uint8_t)atoi(prop);
5532
5533    if(hfrEnable && available_hfr_configs.array()) {
5534        staticInfo.update(
5535                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
5536                available_hfr_configs.array(), available_hfr_configs.size());
5537    }
5538
5539    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
5540    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
5541                      &max_jpeg_size, 1);
5542
5543    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
5544    size_t size = 0;
5545    count = CAM_EFFECT_MODE_MAX;
5546    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
5547    for (size_t i = 0; i < count; i++) {
5548        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5549                gCamCapability[cameraId]->supported_effects[i]);
5550        if (NAME_NOT_FOUND != val) {
5551            avail_effects[size] = (uint8_t)val;
5552            size++;
5553        }
5554    }
5555    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
5556                      avail_effects,
5557                      size);
5558
5559    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
5560    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
5561    size_t supported_scene_modes_cnt = 0;
5562    count = CAM_SCENE_MODE_MAX;
5563    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
5564    for (size_t i = 0; i < count; i++) {
5565        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
5566                CAM_SCENE_MODE_OFF) {
5567            int val = lookupFwkName(SCENE_MODES_MAP,
5568                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
5569                    gCamCapability[cameraId]->supported_scene_modes[i]);
5570            if (NAME_NOT_FOUND != val) {
5571                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
5572                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
5573                supported_scene_modes_cnt++;
5574            }
5575        }
5576    }
5577    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
5578                      avail_scene_modes,
5579                      supported_scene_modes_cnt);
5580
5581    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
5582    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
5583                      supported_scene_modes_cnt,
5584                      CAM_SCENE_MODE_MAX,
5585                      scene_mode_overrides,
5586                      supported_indexes,
5587                      cameraId);
5588
5589    if (supported_scene_modes_cnt == 0) {
5590        supported_scene_modes_cnt = 1;
5591        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
5592    }
5593
5594    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
5595            scene_mode_overrides, supported_scene_modes_cnt * 3);
5596
5597    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
5598                                         ANDROID_CONTROL_MODE_AUTO,
5599                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
5600    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
5601            available_control_modes,
5602            3);
5603
5604    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
5605    size = 0;
5606    count = CAM_ANTIBANDING_MODE_MAX;
5607    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
5608    for (size_t i = 0; i < count; i++) {
5609        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5610                gCamCapability[cameraId]->supported_antibandings[i]);
5611        if (NAME_NOT_FOUND != val) {
5612            avail_antibanding_modes[size] = (uint8_t)val;
5613            size++;
5614        }
5615
5616    }
5617    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
5618                      avail_antibanding_modes,
5619                      size);
5620
5621    uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
5622    size = 0;
5623    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
5624    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
5625    if (0 == count) {
5626        avail_abberation_modes[0] =
5627                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5628        size++;
5629    } else {
5630        for (size_t i = 0; i < count; i++) {
5631            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5632                    gCamCapability[cameraId]->aberration_modes[i]);
5633            if (NAME_NOT_FOUND != val) {
5634                avail_abberation_modes[size] = (uint8_t)val;
5635                size++;
5636            } else {
5637                ALOGE("%s: Invalid CAC mode %d", __func__,
5638                        gCamCapability[cameraId]->aberration_modes[i]);
5639                break;
5640            }
5641        }
5642
5643    }
5644    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
5645            avail_abberation_modes,
5646            size);
5647
5648    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
5649    size = 0;
5650    count = CAM_FOCUS_MODE_MAX;
5651    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
5652    for (size_t i = 0; i < count; i++) {
5653        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
5654                gCamCapability[cameraId]->supported_focus_modes[i]);
5655        if (NAME_NOT_FOUND != val) {
5656            avail_af_modes[size] = (uint8_t)val;
5657            size++;
5658        }
5659    }
5660    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
5661                      avail_af_modes,
5662                      size);
5663
5664    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
5665    size = 0;
5666    count = CAM_WB_MODE_MAX;
5667    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
5668    for (size_t i = 0; i < count; i++) {
5669        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
5670                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
5671                gCamCapability[cameraId]->supported_white_balances[i]);
5672        if (NAME_NOT_FOUND != val) {
5673            avail_awb_modes[size] = (uint8_t)val;
5674            size++;
5675        }
5676    }
5677    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
5678                      avail_awb_modes,
5679                      size);
5680
5681    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
5682    count = CAM_FLASH_FIRING_LEVEL_MAX;
5683    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
5684            count);
5685    for (size_t i = 0; i < count; i++) {
5686        available_flash_levels[i] =
5687                gCamCapability[cameraId]->supported_firing_levels[i];
5688    }
5689    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
5690            available_flash_levels, count);
5691
5692    uint8_t flashAvailable;
5693    if (gCamCapability[cameraId]->flash_available)
5694        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
5695    else
5696        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
5697    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
5698            &flashAvailable, 1);
5699
5700    Vector<uint8_t> avail_ae_modes;
5701    count = CAM_AE_MODE_MAX;
5702    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
5703    for (size_t i = 0; i < count; i++) {
5704        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
5705    }
5706    if (flashAvailable) {
5707        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
5708        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
5709        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
5710    }
5711    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
5712                      avail_ae_modes.array(),
5713                      avail_ae_modes.size());
5714
5715    int32_t sensitivity_range[2];
5716    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
5717    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
5718    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
5719                      sensitivity_range,
5720                      sizeof(sensitivity_range) / sizeof(int32_t));
5721
5722    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
5723                      &gCamCapability[cameraId]->max_analog_sensitivity,
5724                      1);
5725
5726    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
5727    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
5728                      &sensor_orientation,
5729                      1);
5730
5731    int32_t max_output_streams[] = {
5732            MAX_STALLING_STREAMS,
5733            MAX_PROCESSED_STREAMS,
5734            MAX_RAW_STREAMS};
5735    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
5736            max_output_streams,
5737            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
5738
5739    uint8_t avail_leds = 0;
5740    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
5741                      &avail_leds, 0);
5742
5743    uint8_t focus_dist_calibrated;
5744    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
5745            gCamCapability[cameraId]->focus_dist_calibrated);
5746    if (NAME_NOT_FOUND != val) {
5747        focus_dist_calibrated = (uint8_t)val;
5748        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
5749                     &focus_dist_calibrated, 1);
5750    }
5751
5752    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
5753    size = 0;
5754    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
5755            MAX_TEST_PATTERN_CNT);
5756    for (size_t i = 0; i < count; i++) {
5757        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
5758                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
5759        if (NAME_NOT_FOUND != testpatternMode) {
5760            avail_testpattern_modes[size] = testpatternMode;
5761            size++;
5762        }
5763    }
5764    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
5765                      avail_testpattern_modes,
5766                      size);
5767
5768    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
5769    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
5770                      &max_pipeline_depth,
5771                      1);
5772
5773    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
5774    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
5775                      &partial_result_count,
5776                       1);
5777
5778    int32_t max_stall_duration = MAX_REPROCESS_STALL;
5779    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
5780
5781    Vector<uint8_t> available_capabilities;
5782    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
5783    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
5784    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
5785    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
5786    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
5787    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
5788    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
5789    if (hfrEnable) {
5790        available_capabilities.add(
5791                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
5792    }
5793
5794    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
5795        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
5796    }
5797    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
5798            available_capabilities.array(),
5799            available_capabilities.size());
5800
5801    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
5802    //BURST_CAPTURE.
5803    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
5804            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
5805
5806    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
5807            &aeLockAvailable, 1);
5808
5809    //awbLockAvailable to be set to true if capabilities has
5810    //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
5811    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
5812            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
5813
5814    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
5815            &awbLockAvailable, 1);
5816
5817    int32_t max_input_streams = 1;
5818    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
5819                      &max_input_streams,
5820                      1);
5821
5822    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1, HAL_PIXEL_FORMAT_BLOB,
5823            HAL_PIXEL_FORMAT_YCbCr_420_888, 1,HAL_PIXEL_FORMAT_BLOB};
5824    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
5825                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
5826
5827    int32_t max_latency = (limitedDevice) ?
5828            CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
5829    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
5830                      &max_latency,
5831                      1);
5832
5833    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
5834                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
5835    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
5836            available_hot_pixel_modes,
5837            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
5838
5839    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
5840                                         ANDROID_SHADING_MODE_FAST,
5841                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
5842    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
5843                      available_shading_modes,
5844                      3);
5845
5846    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
5847                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
5848    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
5849                      available_lens_shading_map_modes,
5850                      2);
5851
5852    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
5853                                      ANDROID_EDGE_MODE_FAST,
5854                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
5855                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
5856    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
5857            available_edge_modes,
5858            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
5859
5860    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
5861                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
5862                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
5863                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
5864                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
5865    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
5866            available_noise_red_modes,
5867            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
5868
5869    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
5870                                         ANDROID_TONEMAP_MODE_FAST,
5871                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
5872    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
5873            available_tonemap_modes,
5874            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
5875
5876    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
5877    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
5878            available_hot_pixel_map_modes,
5879            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
5880
5881    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
5882            gCamCapability[cameraId]->reference_illuminant1);
5883    if (NAME_NOT_FOUND != val) {
5884        uint8_t fwkReferenceIlluminant = (uint8_t)val;
5885        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
5886    }
5887
5888    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
5889            gCamCapability[cameraId]->reference_illuminant2);
5890    if (NAME_NOT_FOUND != val) {
5891        uint8_t fwkReferenceIlluminant = (uint8_t)val;
5892        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
5893    }
5894
5895    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
5896            (void *)gCamCapability[cameraId]->forward_matrix1,
5897            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
5898
5899    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
5900            (void *)gCamCapability[cameraId]->forward_matrix2,
5901            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
5902
5903    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
5904            (void *)gCamCapability[cameraId]->color_transform1,
5905            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
5906
5907    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
5908            (void *)gCamCapability[cameraId]->color_transform2,
5909            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
5910
5911    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
5912            (void *)gCamCapability[cameraId]->calibration_transform1,
5913            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
5914
5915    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
5916            (void *)gCamCapability[cameraId]->calibration_transform2,
5917            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
5918
5919    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
5920       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
5921       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
5922       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
5923       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
5924       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5925       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
5926       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
5927       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
5928       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
5929       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
5930       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
5931       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
5932       ANDROID_JPEG_GPS_COORDINATES,
5933       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
5934       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
5935       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
5936       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
5937       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
5938       ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
5939       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
5940       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
5941       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
5942       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
5943       ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
5944       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
5945       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
5946       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
5947       ANDROID_BLACK_LEVEL_LOCK };
5948
5949    size_t request_keys_cnt =
5950            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
5951    Vector<int32_t> available_request_keys;
5952    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
5953    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
5954        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
5955    }
5956    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
5957            available_request_keys.array(), available_request_keys.size());
5958
5959    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
5960       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
5961       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
5962       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
5963       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
5964       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
5965       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
5966       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
5967       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
5968       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
5969       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
5970       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
5971       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
5972       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
5973       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5974       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
5975       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
5976       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
5977       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
5978       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5979       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
5980       ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
5981       ANDROID_STATISTICS_FACE_SCORES};
5982    size_t result_keys_cnt =
5983            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
5984
5985    Vector<int32_t> available_result_keys;
5986    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
5987    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
5988        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
5989    }
5990    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
5991       available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
5992       available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
5993    }
5994    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
5995            available_result_keys.array(), available_result_keys.size());
5996
5997    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
5998       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
5999       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
6000       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
6001       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6002       ANDROID_SCALER_CROPPING_TYPE,
6003       ANDROID_SYNC_MAX_LATENCY,
6004       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6005       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6006       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6007       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
6008       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
6009       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6010       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6011       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6012       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6013       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6014       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6015       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6016       ANDROID_LENS_FACING,
6017       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6018       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6019       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6020       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6021       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6022       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6023       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6024       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
6025       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
6026       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
6027       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
6028       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
6029       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6030       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6031       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6032       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6033       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
6034       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6035       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6036       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6037       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6038       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6039       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6040       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6041       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6042       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6043       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6044       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6045       ANDROID_TONEMAP_MAX_CURVE_POINTS,
6046       ANDROID_CONTROL_AVAILABLE_MODES,
6047       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6048       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6049       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6050       ANDROID_SHADING_AVAILABLE_MODES,
6051       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
6052    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
6053                      available_characteristics_keys,
6054                      sizeof(available_characteristics_keys)/sizeof(int32_t));
6055
6056    /*available stall durations depend on the hw + sw and will be different for different devices */
6057    /*have to add for raw after implementation*/
6058    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
6059    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
6060
6061    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6062    size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
6063            MAX_SIZES_CNT);
6064    size_t available_stall_size = count * 4;
6065    int64_t available_stall_durations[available_stall_size];
6066    idx = 0;
6067    for (uint32_t j = 0; j < stall_formats_count; j++) {
6068       if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
6069          for (uint32_t i = 0; i < count; i++) {
6070             available_stall_durations[idx]   = stall_formats[j];
6071             available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6072             available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6073             available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
6074             idx+=4;
6075          }
6076       } else {
6077          for (uint32_t i = 0; i < raw_count; i++) {
6078             available_stall_durations[idx]   = stall_formats[j];
6079             available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
6080             available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
6081             available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
6082             idx+=4;
6083          }
6084       }
6085    }
6086    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
6087                      available_stall_durations,
6088                      idx);
6089    //QCAMERA3_OPAQUE_RAW
6090    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6091    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6092    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
6093    case LEGACY_RAW:
6094        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6095            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
6096        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6097            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6098        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6099            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
6100        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6101        break;
6102    case MIPI_RAW:
6103        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6104            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
6105        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6106            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
6107        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6108            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
6109        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
6110        break;
6111    default:
6112        ALOGE("%s: unknown opaque_raw_format %d", __func__,
6113                gCamCapability[cameraId]->opaque_raw_fmt);
6114        break;
6115    }
6116    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
6117
6118    int32_t strides[3*raw_count];
6119    for (size_t i = 0; i < raw_count; i++) {
6120        cam_stream_buf_plane_info_t buf_planes;
6121        strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
6122        strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
6123        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
6124            &gCamCapability[cameraId]->padding_info, &buf_planes);
6125        strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
6126    }
6127    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
6128            3*raw_count);
6129
6130    gStaticMetadata[cameraId] = staticInfo.release();
6131    return rc;
6132}
6133
6134/*===========================================================================
6135 * FUNCTION   : makeTable
6136 *
6137 * DESCRIPTION: make a table of sizes
6138 *
6139 * PARAMETERS :
6140 *
6141 *
6142 *==========================================================================*/
6143void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
6144        size_t max_size, int32_t *sizeTable)
6145{
6146    size_t j = 0;
6147    if (size > max_size) {
6148       size = max_size;
6149    }
6150    for (size_t i = 0; i < size; i++) {
6151        sizeTable[j] = dimTable[i].width;
6152        sizeTable[j+1] = dimTable[i].height;
6153        j+=2;
6154    }
6155}
6156
6157/*===========================================================================
6158 * FUNCTION   : makeFPSTable
6159 *
6160 * DESCRIPTION: make a table of fps ranges
6161 *
6162 * PARAMETERS :
6163 *
6164 *==========================================================================*/
6165void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
6166        size_t max_size, int32_t *fpsRangesTable)
6167{
6168    size_t j = 0;
6169    if (size > max_size) {
6170       size = max_size;
6171    }
6172    for (size_t i = 0; i < size; i++) {
6173        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
6174        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
6175        j+=2;
6176    }
6177}
6178
6179/*===========================================================================
6180 * FUNCTION   : makeOverridesList
6181 *
6182 * DESCRIPTION: make a list of scene mode overrides
6183 *
6184 * PARAMETERS :
6185 *
6186 *
6187 *==========================================================================*/
6188void QCamera3HardwareInterface::makeOverridesList(
6189        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
6190        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
6191{
6192    /*daemon will give a list of overrides for all scene modes.
6193      However we should send the fwk only the overrides for the scene modes
6194      supported by the framework*/
6195    size_t j = 0;
6196    if (size > max_size) {
6197       size = max_size;
6198    }
6199    size_t focus_count = CAM_FOCUS_MODE_MAX;
6200    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
6201            focus_count);
6202    for (size_t i = 0; i < size; i++) {
6203        bool supt = false;
6204        size_t index = supported_indexes[i];
6205        overridesList[j] = gCamCapability[camera_id]->flash_available ?
6206                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
6207        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6208                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6209                overridesTable[index].awb_mode);
6210        if (NAME_NOT_FOUND != val) {
6211            overridesList[j+1] = (uint8_t)val;
6212        }
6213        uint8_t focus_override = overridesTable[index].af_mode;
6214        for (size_t k = 0; k < focus_count; k++) {
6215           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
6216              supt = true;
6217              break;
6218           }
6219        }
6220        if (supt) {
6221            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6222                    focus_override);
6223            if (NAME_NOT_FOUND != val) {
6224                overridesList[j+2] = (uint8_t)val;
6225            }
6226        } else {
6227           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
6228        }
6229        j+=3;
6230    }
6231}
6232
6233/*===========================================================================
6234 * FUNCTION   : filterJpegSizes
6235 *
6236 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
6237 *              could be downscaled to
6238 *
6239 * PARAMETERS :
6240 *
6241 * RETURN     : length of jpegSizes array
6242 *==========================================================================*/
6243
6244size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
6245        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
6246        uint8_t downscale_factor)
6247{
6248    if (0 == downscale_factor) {
6249        downscale_factor = 1;
6250    }
6251
6252    int32_t min_width = active_array_size.width / downscale_factor;
6253    int32_t min_height = active_array_size.height / downscale_factor;
6254    size_t jpegSizesCnt = 0;
6255    if (processedSizesCnt > maxCount) {
6256        processedSizesCnt = maxCount;
6257    }
6258    for (size_t i = 0; i < processedSizesCnt; i+=2) {
6259        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
6260            jpegSizes[jpegSizesCnt] = processedSizes[i];
6261            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
6262            jpegSizesCnt += 2;
6263        }
6264    }
6265    return jpegSizesCnt;
6266}
6267
6268/*===========================================================================
6269 * FUNCTION   : getPreviewHalPixelFormat
6270 *
6271 * DESCRIPTION: convert the format to type recognized by framework
6272 *
6273 * PARAMETERS : format : the format from backend
6274 *
6275 ** RETURN    : format recognized by framework
6276 *
6277 *==========================================================================*/
6278int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
6279{
6280    int32_t halPixelFormat;
6281
6282    switch (format) {
6283    case CAM_FORMAT_YUV_420_NV12:
6284        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
6285        break;
6286    case CAM_FORMAT_YUV_420_NV21:
6287        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6288        break;
6289    case CAM_FORMAT_YUV_420_NV21_ADRENO:
6290        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
6291        break;
6292    case CAM_FORMAT_YUV_420_YV12:
6293        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
6294        break;
6295    case CAM_FORMAT_YUV_422_NV16:
6296    case CAM_FORMAT_YUV_422_NV61:
6297    default:
6298        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6299        break;
6300    }
6301    return halPixelFormat;
6302}
6303
6304/*===========================================================================
6305 * FUNCTION   : computeNoiseModelEntryS
6306 *
6307 * DESCRIPTION: function to map a given sensitivity to the S noise
6308 *              model parameters in the DNG noise model.
6309 *
6310 * PARAMETERS : sens : the sensor sensitivity
6311 *
6312 ** RETURN    : S (sensor amplification) noise
6313 *
6314 *==========================================================================*/
6315double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
6316    double s = gCamCapability[mCameraId]->gradient_S * sens +
6317            gCamCapability[mCameraId]->offset_S;
6318    return ((s < 0.0) ? 0.0 : s);
6319}
6320
6321/*===========================================================================
6322 * FUNCTION   : computeNoiseModelEntryO
6323 *
6324 * DESCRIPTION: function to map a given sensitivity to the O noise
6325 *              model parameters in the DNG noise model.
6326 *
6327 * PARAMETERS : sens : the sensor sensitivity
6328 *
6329 ** RETURN    : O (sensor readout) noise
6330 *
6331 *==========================================================================*/
6332double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
6333    double o = gCamCapability[mCameraId]->gradient_O * sens +
6334            gCamCapability[mCameraId]->offset_O;
6335    return ((o < 0.0) ? 0.0 : o);
6336}
6337
6338/*===========================================================================
6339 * FUNCTION   : getSensorSensitivity
6340 *
6341 * DESCRIPTION: convert iso_mode to an integer value
6342 *
6343 * PARAMETERS : iso_mode : the iso_mode supported by sensor
6344 *
6345 ** RETURN    : sensitivity supported by sensor
6346 *
6347 *==========================================================================*/
6348int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
6349{
6350    int32_t sensitivity;
6351
6352    switch (iso_mode) {
6353    case CAM_ISO_MODE_100:
6354        sensitivity = 100;
6355        break;
6356    case CAM_ISO_MODE_200:
6357        sensitivity = 200;
6358        break;
6359    case CAM_ISO_MODE_400:
6360        sensitivity = 400;
6361        break;
6362    case CAM_ISO_MODE_800:
6363        sensitivity = 800;
6364        break;
6365    case CAM_ISO_MODE_1600:
6366        sensitivity = 1600;
6367        break;
6368    default:
6369        sensitivity = -1;
6370        break;
6371    }
6372    return sensitivity;
6373}
6374
6375/*===========================================================================
6376 * FUNCTION   : getCamInfo
6377 *
6378 * DESCRIPTION: query camera capabilities
6379 *
6380 * PARAMETERS :
6381 *   @cameraId  : camera Id
6382 *   @info      : camera info struct to be filled in with camera capabilities
6383 *
6384 * RETURN     : int type of status
6385 *              NO_ERROR  -- success
6386 *              none-zero failure code
6387 *==========================================================================*/
6388int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
6389        struct camera_info *info)
6390{
6391    ATRACE_CALL();
6392    int rc = 0;
6393
6394    pthread_mutex_lock(&gCamLock);
6395    if (NULL == gCamCapability[cameraId]) {
6396        rc = initCapabilities(cameraId);
6397        if (rc < 0) {
6398            pthread_mutex_unlock(&gCamLock);
6399            return rc;
6400        }
6401    }
6402
6403    if (NULL == gStaticMetadata[cameraId]) {
6404        rc = initStaticMetadata(cameraId);
6405        if (rc < 0) {
6406            pthread_mutex_unlock(&gCamLock);
6407            return rc;
6408        }
6409    }
6410
6411    switch(gCamCapability[cameraId]->position) {
6412    case CAM_POSITION_BACK:
6413        info->facing = CAMERA_FACING_BACK;
6414        break;
6415
6416    case CAM_POSITION_FRONT:
6417        info->facing = CAMERA_FACING_FRONT;
6418        break;
6419
6420    default:
6421        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
6422        rc = -1;
6423        break;
6424    }
6425
6426
6427    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
6428    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
6429    info->static_camera_characteristics = gStaticMetadata[cameraId];
6430
6431    //For now assume both cameras can operate independently.
6432    info->conflicting_devices = NULL;
6433    info->conflicting_devices_length = 0;
6434
6435    //resource cost is 100 * MIN(1.0, m/M),
6436    //where m is throughput requirement with maximum stream configuration
6437    //and M is CPP maximum throughput.
6438    float max_fps = 0.0;
6439    for (uint32_t i = 0;
6440            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
6441        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
6442            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
6443    }
6444    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
6445            gCamCapability[cameraId]->active_array_size.width *
6446            gCamCapability[cameraId]->active_array_size.height * max_fps /
6447            gCamCapability[cameraId]->max_pixel_bandwidth;
6448    info->resource_cost = 100 * MIN(1.0, ratio);
6449    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
6450            info->resource_cost);
6451
6452    pthread_mutex_unlock(&gCamLock);
6453    return rc;
6454}
6455
6456/*===========================================================================
6457 * FUNCTION   : translateCapabilityToMetadata
6458 *
6459 * DESCRIPTION: translate the capability into camera_metadata_t
6460 *
6461 * PARAMETERS : type of the request
6462 *
6463 *
6464 * RETURN     : success: camera_metadata_t*
6465 *              failure: NULL
6466 *
6467 *==========================================================================*/
6468camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
6469{
6470    if (mDefaultMetadata[type] != NULL) {
6471        return mDefaultMetadata[type];
6472    }
6473    //first time we are handling this request
6474    //fill up the metadata structure using the wrapper class
6475    CameraMetadata settings;
6476    //translate from cam_capability_t to camera_metadata_tag_t
6477    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
6478    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
6479    int32_t defaultRequestID = 0;
6480    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
6481
6482    /* OIS disable */
6483    char ois_prop[PROPERTY_VALUE_MAX];
6484    memset(ois_prop, 0, sizeof(ois_prop));
6485    property_get("persist.camera.ois.disable", ois_prop, "0");
6486    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
6487
6488    /* Force video to use OIS */
6489    char videoOisProp[PROPERTY_VALUE_MAX];
6490    memset(videoOisProp, 0, sizeof(videoOisProp));
6491    property_get("persist.camera.ois.video", videoOisProp, "1");
6492    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
6493
6494    uint8_t controlIntent = 0;
6495    uint8_t focusMode;
6496    uint8_t vsMode;
6497    uint8_t optStabMode;
6498    uint8_t cacMode;
6499    uint8_t edge_mode;
6500    uint8_t noise_red_mode;
6501    uint8_t tonemap_mode;
6502    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6503    switch (type) {
6504      case CAMERA3_TEMPLATE_PREVIEW:
6505        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
6506        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6507        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6508        edge_mode = ANDROID_EDGE_MODE_FAST;
6509        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6510        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6511        break;
6512      case CAMERA3_TEMPLATE_STILL_CAPTURE:
6513        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
6514        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6515        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6516        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
6517        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
6518        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
6519        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
6520        settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
6521        break;
6522      case CAMERA3_TEMPLATE_VIDEO_RECORD:
6523        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
6524        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6525        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6526        edge_mode = ANDROID_EDGE_MODE_FAST;
6527        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6528        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6529        if (forceVideoOis)
6530            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6531        break;
6532      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
6533        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
6534        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6535        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6536        edge_mode = ANDROID_EDGE_MODE_FAST;
6537        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6538        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6539        if (forceVideoOis)
6540            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6541        break;
6542      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
6543        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
6544        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6545        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6546        edge_mode = ANDROID_EDGE_MODE_FAST;
6547        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6548        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6549        break;
6550      case CAMERA3_TEMPLATE_MANUAL:
6551        edge_mode = ANDROID_EDGE_MODE_FAST;
6552        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6553        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6554        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
6555        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
6556        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6557        break;
6558      default:
6559        edge_mode = ANDROID_EDGE_MODE_FAST;
6560        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6561        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6562        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
6563        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6564        break;
6565    }
6566    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
6567    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
6568    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
6569        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
6570    }
6571    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
6572
6573    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
6574            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
6575        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6576    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
6577            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
6578            || ois_disable)
6579        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6580    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
6581
6582    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6583            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
6584
6585    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
6586    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
6587
6588    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
6589    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
6590
6591    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
6592    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
6593
6594    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
6595    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
6596
6597    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
6598    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
6599
6600    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
6601    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
6602
6603    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
6604    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
6605
6606    /*flash*/
6607    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
6608    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
6609
6610    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
6611    settings.update(ANDROID_FLASH_FIRING_POWER,
6612            &flashFiringLevel, 1);
6613
6614    /* lens */
6615    float default_aperture = gCamCapability[mCameraId]->apertures[0];
6616    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
6617
6618    if (gCamCapability[mCameraId]->filter_densities_count) {
6619        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
6620        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
6621                        gCamCapability[mCameraId]->filter_densities_count);
6622    }
6623
6624    float default_focal_length = gCamCapability[mCameraId]->focal_length;
6625    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
6626
6627    float default_focus_distance = 0;
6628    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
6629
6630    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
6631    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
6632
6633    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
6634    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
6635
6636    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
6637    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
6638
6639    /* face detection (default to OFF) */
6640    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
6641    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
6642
6643    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
6644    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
6645
6646    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
6647    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
6648
6649    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6650    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6651
6652    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
6653    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
6654
6655    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
6656    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
6657
6658    /* Exposure time(Update the Min Exposure Time)*/
6659    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
6660    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
6661
6662    /* frame duration */
6663    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
6664    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
6665
6666    /* sensitivity */
6667    static const int32_t default_sensitivity = 100;
6668    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
6669
6670    /*edge mode*/
6671    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
6672
6673    /*noise reduction mode*/
6674    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
6675
6676    /*color correction mode*/
6677    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
6678    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
6679
6680    /*transform matrix mode*/
6681    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
6682
6683    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
6684    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
6685
6686    int32_t scaler_crop_region[4];
6687    scaler_crop_region[0] = 0;
6688    scaler_crop_region[1] = 0;
6689    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
6690    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
6691    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
6692
6693    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
6694    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
6695
6696    /*focus distance*/
6697    float focus_distance = 0.0;
6698    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
6699
6700    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
6701    float max_range = 0.0;
6702    float max_fixed_fps = 0.0;
6703    int32_t fps_range[2] = {0, 0};
6704    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
6705            i++) {
6706        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
6707            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
6708        if (type == CAMERA3_TEMPLATE_PREVIEW ||
6709                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
6710                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
6711            if (range > max_range) {
6712                fps_range[0] =
6713                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
6714                fps_range[1] =
6715                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
6716                max_range = range;
6717            }
6718        } else {
6719            if (range < 0.01 && max_fixed_fps <
6720                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
6721                fps_range[0] =
6722                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
6723                fps_range[1] =
6724                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
6725                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
6726            }
6727        }
6728    }
6729    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
6730
6731    /*precapture trigger*/
6732    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
6733    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
6734
6735    /*af trigger*/
6736    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
6737    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
6738
6739    /* ae & af regions */
6740    int32_t active_region[] = {
6741            gCamCapability[mCameraId]->active_array_size.left,
6742            gCamCapability[mCameraId]->active_array_size.top,
6743            gCamCapability[mCameraId]->active_array_size.left +
6744                    gCamCapability[mCameraId]->active_array_size.width,
6745            gCamCapability[mCameraId]->active_array_size.top +
6746                    gCamCapability[mCameraId]->active_array_size.height,
6747            0};
6748    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
6749            sizeof(active_region) / sizeof(active_region[0]));
6750    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
6751            sizeof(active_region) / sizeof(active_region[0]));
6752
6753    /* black level lock */
6754    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
6755    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
6756
6757    /* lens shading map mode */
6758    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
6759    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
6760        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
6761    }
6762    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
6763
6764    //special defaults for manual template
6765    if (type == CAMERA3_TEMPLATE_MANUAL) {
6766        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
6767        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
6768
6769        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
6770        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
6771
6772        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
6773        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
6774
6775        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
6776        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
6777
6778        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
6779        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
6780
6781        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
6782        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
6783    }
6784
6785    /* CDS default */
6786    char prop[PROPERTY_VALUE_MAX];
6787    memset(prop, 0, sizeof(prop));
6788    property_get("persist.camera.CDS", prop, "Auto");
6789    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
6790    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
6791    if (CAM_CDS_MODE_MAX == cds_mode) {
6792        cds_mode = CAM_CDS_MODE_AUTO;
6793    }
6794    int32_t mode = cds_mode;
6795    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
6796
6797    mDefaultMetadata[type] = settings.release();
6798
6799    return mDefaultMetadata[type];
6800}
6801
6802/*===========================================================================
6803 * FUNCTION   : setFrameParameters
6804 *
6805 * DESCRIPTION: set parameters per frame as requested in the metadata from
6806 *              framework
6807 *
6808 * PARAMETERS :
6809 *   @request   : request that needs to be serviced
6810 *   @streamID : Stream ID of all the requested streams
6811 *   @blob_request: Whether this request is a blob request or not
6812 *
6813 * RETURN     : success: NO_ERROR
6814 *              failure:
6815 *==========================================================================*/
6816int QCamera3HardwareInterface::setFrameParameters(
6817                    camera3_capture_request_t *request,
6818                    cam_stream_ID_t streamID,
6819                    int blob_request,
6820                    uint32_t snapshotStreamId)
6821{
6822    /*translate from camera_metadata_t type to parm_type_t*/
6823    int rc = 0;
6824    int32_t hal_version = CAM_HAL_V3;
6825
6826    clear_metadata_buffer(mParameters);
6827    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
6828        ALOGE("%s: Failed to set hal version in the parameters", __func__);
6829        return BAD_VALUE;
6830    }
6831
6832    /*we need to update the frame number in the parameters*/
6833    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
6834            request->frame_number)) {
6835        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
6836        return BAD_VALUE;
6837    }
6838
6839    /* Update stream id of all the requested buffers */
6840    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
6841        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
6842        return BAD_VALUE;
6843    }
6844
6845    if (mUpdateDebugLevel) {
6846        uint32_t dummyDebugLevel = 0;
6847        /* The value of dummyDebugLevel is irrelavent. On
6848         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
6849        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
6850                dummyDebugLevel)) {
6851            ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
6852            return BAD_VALUE;
6853        }
6854        mUpdateDebugLevel = false;
6855    }
6856
6857    if(request->settings != NULL){
6858        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
6859        if (blob_request)
6860            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
6861    }
6862
6863    return rc;
6864}
6865
6866/*===========================================================================
6867 * FUNCTION   : setReprocParameters
6868 *
6869 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
6870 *              return it.
6871 *
6872 * PARAMETERS :
6873 *   @request   : request that needs to be serviced
6874 *
6875 * RETURN     : success: NO_ERROR
6876 *              failure:
6877 *==========================================================================*/
6878int32_t QCamera3HardwareInterface::setReprocParameters(
6879        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
6880        uint32_t snapshotStreamId)
6881{
6882    /*translate from camera_metadata_t type to parm_type_t*/
6883    int rc = 0;
6884
6885    if (NULL == request->settings){
6886        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
6887        return BAD_VALUE;
6888    }
6889
6890    if (NULL == reprocParam) {
6891        ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
6892        return BAD_VALUE;
6893    }
6894    clear_metadata_buffer(reprocParam);
6895
6896    /*we need to update the frame number in the parameters*/
6897    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
6898            request->frame_number)) {
6899        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
6900        return BAD_VALUE;
6901    }
6902
6903    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
6904    if (rc < 0) {
6905        ALOGE("%s: Failed to translate reproc request", __func__);
6906        return rc;
6907    }
6908
6909    CameraMetadata frame_settings;
6910    frame_settings = request->settings;
6911    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
6912            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
6913        int32_t *crop_count =
6914                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
6915        int32_t *crop_data =
6916                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
6917        int32_t *roi_map =
6918                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
6919        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
6920            cam_crop_data_t crop_meta;
6921            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
6922            crop_meta.num_of_streams = 1;
6923            crop_meta.crop_info[0].crop.left   = crop_data[0];
6924            crop_meta.crop_info[0].crop.top    = crop_data[1];
6925            crop_meta.crop_info[0].crop.width  = crop_data[2];
6926            crop_meta.crop_info[0].crop.height = crop_data[3];
6927
6928            crop_meta.crop_info[0].roi_map.left =
6929                    roi_map[0];
6930            crop_meta.crop_info[0].roi_map.top =
6931                    roi_map[1];
6932            crop_meta.crop_info[0].roi_map.width =
6933                    roi_map[2];
6934            crop_meta.crop_info[0].roi_map.height =
6935                    roi_map[3];
6936
6937            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
6938                rc = BAD_VALUE;
6939            }
6940            CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
6941                    __func__,
6942                    request->input_buffer->stream,
6943                    crop_meta.crop_info[0].crop.left,
6944                    crop_meta.crop_info[0].crop.top,
6945                    crop_meta.crop_info[0].crop.width,
6946                    crop_meta.crop_info[0].crop.height);
6947            CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
6948                    __func__,
6949                    request->input_buffer->stream,
6950                    crop_meta.crop_info[0].roi_map.left,
6951                    crop_meta.crop_info[0].roi_map.top,
6952                    crop_meta.crop_info[0].roi_map.width,
6953                    crop_meta.crop_info[0].roi_map.height);
6954            } else {
6955                ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
6956            }
6957    } else {
6958        ALOGE("%s: No crop data from matching output stream", __func__);
6959    }
6960
6961    return rc;
6962}
6963
6964/*===========================================================================
6965 * FUNCTION   : setHalFpsRange
6966 *
6967 * DESCRIPTION: set FPS range parameter
6968 *
6969 *
6970 * PARAMETERS :
6971 *   @settings    : Metadata from framework
6972 *   @hal_metadata: Metadata buffer
6973 *
6974 *
6975 * RETURN     : success: NO_ERROR
6976 *              failure:
6977 *==========================================================================*/
6978int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
6979        metadata_buffer_t *hal_metadata)
6980{
6981    int32_t rc = NO_ERROR;
6982    cam_fps_range_t fps_range;
6983    fps_range.min_fps = (float)
6984            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
6985    fps_range.max_fps = (float)
6986            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
6987    fps_range.video_min_fps = fps_range.min_fps;
6988    fps_range.video_max_fps = fps_range.max_fps;
6989
6990    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
6991            fps_range.min_fps, fps_range.max_fps);
6992    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
6993     * follows:
6994     * ---------------------------------------------------------------|
6995     *      Video stream is absent in configure_streams               |
6996     *    (Camcorder preview before the first video record            |
6997     * ---------------------------------------------------------------|
6998     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
6999     *                   |             |             | vid_min/max_fps|
7000     * ---------------------------------------------------------------|
7001     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
7002     *                   |-------------|-------------|----------------|
7003     *                   |  [240, 240] |     240     |  [240, 240]    |
7004     * ---------------------------------------------------------------|
7005     *     Video stream is present in configure_streams               |
7006     * ---------------------------------------------------------------|
7007     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
7008     *                   |             |             | vid_min/max_fps|
7009     * ---------------------------------------------------------------|
7010     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
7011     * (camcorder prev   |-------------|-------------|----------------|
7012     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
7013     *  is stopped)      |             |             |                |
7014     * ---------------------------------------------------------------|
7015     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
7016     *                   |-------------|-------------|----------------|
7017     *                   |  [240, 240] |     240     |  [240, 240]    |
7018     * ---------------------------------------------------------------|
7019     * When Video stream is absent in configure_streams,
7020     * preview fps = sensor_fps / batchsize
7021     * Eg: for 240fps at batchSize 4, preview = 60fps
7022     *     for 120fps at batchSize 4, preview = 30fps
7023     *
7024     * When video stream is present in configure_streams, preview fps is as per
7025     * the ratio of preview buffers to video buffers requested in process
7026     * capture request
7027     */
7028    mBatchSize = 0;
7029    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
7030        fps_range.min_fps = fps_range.video_max_fps;
7031        fps_range.video_min_fps = fps_range.video_max_fps;
7032        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
7033                fps_range.max_fps);
7034        if (NAME_NOT_FOUND != val) {
7035            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
7036            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
7037                return BAD_VALUE;
7038            }
7039
7040            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
7041                /* If batchmode is currently in progress and the fps changes,
7042                 * set the flag to restart the sensor */
7043                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
7044                        (mHFRVideoFps != fps_range.max_fps)) {
7045                    mNeedSensorRestart = true;
7046                }
7047                mHFRVideoFps = fps_range.max_fps;
7048                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
7049                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
7050                    mBatchSize = MAX_HFR_BATCH_SIZE;
7051                }
7052             }
7053            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);
7054
7055         }
7056    }
7057    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
7058        return BAD_VALUE;
7059    }
7060    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
7061            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
7062    return rc;
7063}
7064
7065/*===========================================================================
7066 * FUNCTION   : translateToHalMetadata
7067 *
7068 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
7069 *
7070 *
7071 * PARAMETERS :
7072 *   @request  : request sent from framework
7073 *
7074 *
7075 * RETURN     : success: NO_ERROR
7076 *              failure:
7077 *==========================================================================*/
7078int QCamera3HardwareInterface::translateToHalMetadata
7079                                  (const camera3_capture_request_t *request,
7080                                   metadata_buffer_t *hal_metadata,
7081                                   uint32_t snapshotStreamId)
7082{
7083    int rc = 0;
7084    CameraMetadata frame_settings;
7085    frame_settings = request->settings;
7086
    /* Do not change the order of the following list unless you know what you are
     * doing.
     * The order is laid out in such a way that parameters in the front of the table
     * may be used to override the parameters later in the table. Examples are:
     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
     */
7096    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
7097        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
7098        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
7099            rc = BAD_VALUE;
7100        }
7101        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
7102        if (rc != NO_ERROR) {
7103            ALOGE("%s: extractSceneMode failed", __func__);
7104        }
7105    }
7106
7107    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7108        uint8_t fwk_aeMode =
7109            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7110        uint8_t aeMode;
7111        int32_t redeye;
7112
7113        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
7114            aeMode = CAM_AE_MODE_OFF;
7115        } else {
7116            aeMode = CAM_AE_MODE_ON;
7117        }
7118        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
7119            redeye = 1;
7120        } else {
7121            redeye = 0;
7122        }
7123
7124        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7125                fwk_aeMode);
7126        if (NAME_NOT_FOUND != val) {
7127            int32_t flashMode = (int32_t)val;
7128            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
7129        }
7130
7131        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
7132        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
7133            rc = BAD_VALUE;
7134        }
7135    }
7136
7137    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
7138        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
7139        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7140                fwk_whiteLevel);
7141        if (NAME_NOT_FOUND != val) {
7142            uint8_t whiteLevel = (uint8_t)val;
7143            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
7144                rc = BAD_VALUE;
7145            }
7146        }
7147    }
7148
7149    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
7150        uint8_t fwk_cacMode =
7151                frame_settings.find(
7152                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
7153        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7154                fwk_cacMode);
7155        if (NAME_NOT_FOUND != val) {
7156            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
7157            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
7158                rc = BAD_VALUE;
7159            }
7160        } else {
7161            ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
7162        }
7163    }
7164
7165    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
7166        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
7167        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7168                fwk_focusMode);
7169        if (NAME_NOT_FOUND != val) {
7170            uint8_t focusMode = (uint8_t)val;
7171            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
7172                rc = BAD_VALUE;
7173            }
7174        }
7175    }
7176
7177    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
7178        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
7179        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
7180                focalDistance)) {
7181            rc = BAD_VALUE;
7182        }
7183    }
7184
7185    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
7186        uint8_t fwk_antibandingMode =
7187                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
7188        int val = lookupHalName(ANTIBANDING_MODES_MAP,
7189                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
7190        if (NAME_NOT_FOUND != val) {
7191            uint32_t hal_antibandingMode = (uint32_t)val;
7192            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
7193                    hal_antibandingMode)) {
7194                rc = BAD_VALUE;
7195            }
7196        }
7197    }
7198
7199    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
7200        int32_t expCompensation = frame_settings.find(
7201                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
7202        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
7203            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
7204        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
7205            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
7206        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
7207                expCompensation)) {
7208            rc = BAD_VALUE;
7209        }
7210    }
7211
7212    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
7213        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
7214        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
7215            rc = BAD_VALUE;
7216        }
7217    }
7218    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
7219        rc = setHalFpsRange(frame_settings, hal_metadata);
7220        if (rc != NO_ERROR) {
7221            ALOGE("%s: setHalFpsRange failed", __func__);
7222        }
7223    }
7224
7225    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
7226        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
7227        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
7228            rc = BAD_VALUE;
7229        }
7230    }
7231
7232    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
7233        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
7234        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7235                fwk_effectMode);
7236        if (NAME_NOT_FOUND != val) {
7237            uint8_t effectMode = (uint8_t)val;
7238            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
7239                rc = BAD_VALUE;
7240            }
7241        }
7242    }
7243
7244    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
7245        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
7246        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
7247                colorCorrectMode)) {
7248            rc = BAD_VALUE;
7249        }
7250    }
7251
7252    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
7253        cam_color_correct_gains_t colorCorrectGains;
7254        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
7255            colorCorrectGains.gains[i] =
7256                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
7257        }
7258        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
7259                colorCorrectGains)) {
7260            rc = BAD_VALUE;
7261        }
7262    }
7263
7264    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
7265        cam_color_correct_matrix_t colorCorrectTransform;
7266        cam_rational_type_t transform_elem;
7267        size_t num = 0;
7268        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
7269           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
7270              transform_elem.numerator =
7271                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
7272              transform_elem.denominator =
7273                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
7274              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
7275              num++;
7276           }
7277        }
7278        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
7279                colorCorrectTransform)) {
7280            rc = BAD_VALUE;
7281        }
7282    }
7283
7284    cam_trigger_t aecTrigger;
7285    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
7286    aecTrigger.trigger_id = -1;
7287    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
7288        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
7289        aecTrigger.trigger =
7290            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
7291        aecTrigger.trigger_id =
7292            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
7293        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
7294                aecTrigger)) {
7295            rc = BAD_VALUE;
7296        }
7297        CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
7298                aecTrigger.trigger, aecTrigger.trigger_id);
7299    }
7300
7301    /*af_trigger must come with a trigger id*/
7302    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
7303        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
7304        cam_trigger_t af_trigger;
7305        af_trigger.trigger =
7306            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
7307        af_trigger.trigger_id =
7308            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
7309        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
7310            rc = BAD_VALUE;
7311        }
7312        CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
7313                af_trigger.trigger, af_trigger.trigger_id);
7314    }
7315
7316    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
7317        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
7318        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
7319            rc = BAD_VALUE;
7320        }
7321    }
7322
7323    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
7324        cam_edge_application_t edge_application;
7325        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
7326        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
7327            edge_application.sharpness = 0;
7328        } else {
7329            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
7330                uint8_t edgeStrength = frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
7331                edge_application.sharpness = (int32_t)edgeStrength;
7332            } else {
7333                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
7334            }
7335        }
7336        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
7337            rc = BAD_VALUE;
7338        }
7339    }
7340
7341    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7342        int32_t respectFlashMode = 1;
7343        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7344            uint8_t fwk_aeMode =
7345                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7346            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
7347                respectFlashMode = 0;
7348                CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
7349                    __func__);
7350            }
7351        }
7352        if (respectFlashMode) {
7353            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7354                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7355            CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
7356            // To check: CAM_INTF_META_FLASH_MODE usage
7357            if (NAME_NOT_FOUND != val) {
7358                uint8_t flashMode = (uint8_t)val;
7359                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
7360                    rc = BAD_VALUE;
7361                }
7362            }
7363        }
7364    }
7365
7366    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
7367        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
7368        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
7369            rc = BAD_VALUE;
7370        }
7371    }
7372
7373    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
7374        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
7375        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
7376                flashFiringTime)) {
7377            rc = BAD_VALUE;
7378        }
7379    }
7380
7381    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
7382        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
7383        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
7384                hotPixelMode)) {
7385            rc = BAD_VALUE;
7386        }
7387    }
7388
7389    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
7390        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
7391        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
7392                lensAperture)) {
7393            rc = BAD_VALUE;
7394        }
7395    }
7396
7397    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
7398        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
7399        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
7400                filterDensity)) {
7401            rc = BAD_VALUE;
7402        }
7403    }
7404
7405    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
7406        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
7407        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH, focalLength)) {
7408            rc = BAD_VALUE;
7409        }
7410    }
7411
7412    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
7413        uint8_t optStabMode =
7414                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
7415        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE, optStabMode)) {
7416            rc = BAD_VALUE;
7417        }
7418    }
7419
7420    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
7421        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
7422        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
7423                noiseRedMode)) {
7424            rc = BAD_VALUE;
7425        }
7426    }
7427
7428    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
7429        uint8_t noiseRedStrength =
7430                frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
7431        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
7432                noiseRedStrength)) {
7433            rc = BAD_VALUE;
7434        }
7435    }
7436
7437    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
7438        float reprocessEffectiveExposureFactor =
7439            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
7440        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
7441                reprocessEffectiveExposureFactor)) {
7442            rc = BAD_VALUE;
7443        }
7444    }
7445
7446    cam_crop_region_t scalerCropRegion;
7447    bool scalerCropSet = false;
7448    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
7449        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
7450        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
7451        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
7452        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
7453
7454        // Map coordinate system from active array to sensor output.
7455        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
7456                scalerCropRegion.width, scalerCropRegion.height);
7457
7458        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
7459                scalerCropRegion)) {
7460            rc = BAD_VALUE;
7461        }
7462        scalerCropSet = true;
7463    }
7464
7465    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
7466        int64_t sensorExpTime =
7467                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
7468        CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
7469        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
7470                sensorExpTime)) {
7471            rc = BAD_VALUE;
7472        }
7473    }
7474
7475    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
7476        int64_t sensorFrameDuration =
7477                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
7478        int64_t minFrameDuration = getMinFrameDuration(request);
7479        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
7480        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
7481            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
7482        CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
7483        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
7484                sensorFrameDuration)) {
7485            rc = BAD_VALUE;
7486        }
7487    }
7488
7489    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
7490        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
7491        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
7492                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
7493        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
7494                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
7495        CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
7496        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
7497                sensorSensitivity)) {
7498            rc = BAD_VALUE;
7499        }
7500    }
7501
7502    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
7503        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
7504        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
7505            rc = BAD_VALUE;
7506        }
7507    }
7508
7509    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
7510        uint8_t shadingStrength = frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
7511        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
7512                shadingStrength)) {
7513            rc = BAD_VALUE;
7514        }
7515    }
7516
7517    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
7518        uint8_t fwk_facedetectMode =
7519                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
7520
7521        fwk_facedetectMode = (m_overrideAppFaceDetection < 0) ?
7522                                    fwk_facedetectMode : (uint8_t)m_overrideAppFaceDetection;
7523
7524        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7525                fwk_facedetectMode);
7526
7527        if (NAME_NOT_FOUND != val) {
7528            uint8_t facedetectMode = (uint8_t)val;
7529            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
7530                    facedetectMode)) {
7531                rc = BAD_VALUE;
7532            }
7533        }
7534    }
7535
7536    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
7537        uint8_t histogramMode =
7538                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
7539        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
7540                histogramMode)) {
7541            rc = BAD_VALUE;
7542        }
7543    }
7544
7545    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
7546        uint8_t sharpnessMapMode =
7547                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
7548        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
7549                sharpnessMapMode)) {
7550            rc = BAD_VALUE;
7551        }
7552    }
7553
7554    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
7555        uint8_t tonemapMode =
7556                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
7557        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
7558            rc = BAD_VALUE;
7559        }
7560    }
7561    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
7562    /*All tonemap channels will have the same number of points*/
7563    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
7564        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
7565        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
7566        cam_rgb_tonemap_curves tonemapCurves;
7567        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
7568        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7569            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
7570                    __func__, tonemapCurves.tonemap_points_cnt,
7571                    CAM_MAX_TONEMAP_CURVE_SIZE);
7572            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7573        }
7574
7575        /* ch0 = G*/
7576        size_t point = 0;
7577        cam_tonemap_curve_t tonemapCurveGreen;
7578        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
7579            for (size_t j = 0; j < 2; j++) {
7580               tonemapCurveGreen.tonemap_points[i][j] =
7581                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
7582               point++;
7583            }
7584        }
7585        tonemapCurves.curves[0] = tonemapCurveGreen;
7586
7587        /* ch 1 = B */
7588        point = 0;
7589        cam_tonemap_curve_t tonemapCurveBlue;
7590        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
7591            for (size_t j = 0; j < 2; j++) {
7592               tonemapCurveBlue.tonemap_points[i][j] =
7593                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
7594               point++;
7595            }
7596        }
7597        tonemapCurves.curves[1] = tonemapCurveBlue;
7598
7599        /* ch 2 = R */
7600        point = 0;
7601        cam_tonemap_curve_t tonemapCurveRed;
7602        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
7603            for (size_t j = 0; j < 2; j++) {
7604               tonemapCurveRed.tonemap_points[i][j] =
7605                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
7606               point++;
7607            }
7608        }
7609        tonemapCurves.curves[2] = tonemapCurveRed;
7610
7611        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
7612                tonemapCurves)) {
7613            rc = BAD_VALUE;
7614        }
7615    }
7616
7617    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
7618        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
7619        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
7620                captureIntent)) {
7621            rc = BAD_VALUE;
7622        }
7623    }
7624
7625    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
7626        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
7627        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
7628                blackLevelLock)) {
7629            rc = BAD_VALUE;
7630        }
7631    }
7632
7633    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
7634        uint8_t lensShadingMapMode =
7635                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
7636        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
7637                lensShadingMapMode)) {
7638            rc = BAD_VALUE;
7639        }
7640    }
7641
7642    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
7643        cam_area_t roi;
7644        bool reset = true;
7645        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
7646
7647        // Map coordinate system from active array to sensor output.
7648        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
7649                roi.rect.height);
7650
7651        if (scalerCropSet) {
7652            reset = resetIfNeededROI(&roi, &scalerCropRegion);
7653        }
7654        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
7655            rc = BAD_VALUE;
7656        }
7657    }
7658
7659    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
7660        cam_area_t roi;
7661        bool reset = true;
7662        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
7663
7664        // Map coordinate system from active array to sensor output.
7665        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
7666                roi.rect.height);
7667
7668        if (scalerCropSet) {
7669            reset = resetIfNeededROI(&roi, &scalerCropRegion);
7670        }
7671        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
7672            rc = BAD_VALUE;
7673        }
7674    }
7675
7676    // CDS for non-HFR mode
7677    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
7678            (false == mPprocBypass) &&
7679            frame_settings.exists(QCAMERA3_CDS_MODE)) {
7680        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
7681        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
7682            ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
7683        } else {
7684            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
7685                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
7686                rc = BAD_VALUE;
7687            }
7688        }
7689    }
7690
7691    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
7692        int32_t fwk_testPatternMode =
7693                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
7694        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
7695                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
7696
7697        if (NAME_NOT_FOUND != testPatternMode) {
7698            cam_test_pattern_data_t testPatternData;
7699            memset(&testPatternData, 0, sizeof(testPatternData));
7700            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
7701            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
7702                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
7703                int32_t *fwk_testPatternData =
7704                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
7705                testPatternData.r = fwk_testPatternData[0];
7706                testPatternData.b = fwk_testPatternData[3];
7707                switch (gCamCapability[mCameraId]->color_arrangement) {
7708                    case CAM_FILTER_ARRANGEMENT_RGGB:
7709                    case CAM_FILTER_ARRANGEMENT_GRBG:
7710                        testPatternData.gr = fwk_testPatternData[1];
7711                        testPatternData.gb = fwk_testPatternData[2];
7712                        break;
7713                    case CAM_FILTER_ARRANGEMENT_GBRG:
7714                    case CAM_FILTER_ARRANGEMENT_BGGR:
7715                        testPatternData.gr = fwk_testPatternData[2];
7716                        testPatternData.gb = fwk_testPatternData[1];
7717                        break;
7718                    default:
7719                        ALOGE("%s: color arrangement %d is not supported", __func__,
7720                                gCamCapability[mCameraId]->color_arrangement);
7721                        break;
7722                }
7723            }
7724            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
7725                    testPatternData)) {
7726                rc = BAD_VALUE;
7727            }
7728        } else {
7729            ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
7730                    fwk_testPatternMode);
7731        }
7732    }
7733
7734    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
7735        size_t count = 0;
7736        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
7737        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
7738                gps_coords.data.d, gps_coords.count, count);
7739        if (gps_coords.count != count) {
7740            rc = BAD_VALUE;
7741        }
7742    }
7743
7744    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
7745        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
7746        size_t count = 0;
7747        const char *gps_methods_src = (const char *)
7748                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
7749        memset(gps_methods, '\0', sizeof(gps_methods));
7750        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
7751        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
7752                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
7753        if (GPS_PROCESSING_METHOD_SIZE != count) {
7754            rc = BAD_VALUE;
7755        }
7756    }
7757
7758    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
7759        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
7760        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
7761                gps_timestamp)) {
7762            rc = BAD_VALUE;
7763        }
7764    }
7765
7766    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7767        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
7768        cam_rotation_info_t rotation_info;
7769        if (orientation == 0) {
7770           rotation_info.rotation = ROTATE_0;
7771        } else if (orientation == 90) {
7772           rotation_info.rotation = ROTATE_90;
7773        } else if (orientation == 180) {
7774           rotation_info.rotation = ROTATE_180;
7775        } else if (orientation == 270) {
7776           rotation_info.rotation = ROTATE_270;
7777        }
7778        rotation_info.streamId = snapshotStreamId;
7779        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
7780        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
7781            rc = BAD_VALUE;
7782        }
7783    }
7784
7785    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
7786        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
7787        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
7788            rc = BAD_VALUE;
7789        }
7790    }
7791
7792    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
7793        uint32_t thumb_quality = (uint32_t)
7794                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
7795        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
7796                thumb_quality)) {
7797            rc = BAD_VALUE;
7798        }
7799    }
7800
7801    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7802        cam_dimension_t dim;
7803        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7804        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7805        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
7806            rc = BAD_VALUE;
7807        }
7808    }
7809
7810    // Internal metadata
7811    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
7812        size_t count = 0;
7813        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
7814        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
7815                privatedata.data.i32, privatedata.count, count);
7816        if (privatedata.count != count) {
7817            rc = BAD_VALUE;
7818        }
7819    }
7820
7821    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
7822        uint8_t* use_av_timer =
7823                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
7824        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
7825            rc = BAD_VALUE;
7826        }
7827    }
7828
7829    // EV step
7830    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
7831            gCamCapability[mCameraId]->exp_compensation_step)) {
7832        rc = BAD_VALUE;
7833    }
7834
7835    return rc;
7836}
7837
7838/*===========================================================================
7839 * FUNCTION   : captureResultCb
7840 *
7841 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
7842 *
7843 * PARAMETERS :
7844 *   @frame  : frame information from mm-camera-interface
7845 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
7846 *   @userdata: userdata
7847 *
7848 * RETURN     : NONE
7849 *==========================================================================*/
7850void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
7851                camera3_stream_buffer_t *buffer,
7852                uint32_t frame_number, void *userdata)
7853{
7854    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
7855    if (hw == NULL) {
7856        ALOGE("%s: Invalid hw %p", __func__, hw);
7857        return;
7858    }
7859
7860    hw->captureResultCb(metadata, buffer, frame_number);
7861    return;
7862}
7863
7864
7865/*===========================================================================
7866 * FUNCTION   : initialize
7867 *
7868 * DESCRIPTION: Pass framework callback pointers to HAL
7869 *
7870 * PARAMETERS :
7871 *
7872 *
7873 * RETURN     : Success : 0
7874 *              Failure: -ENODEV
7875 *==========================================================================*/
7876
7877int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
7878                                  const camera3_callback_ops_t *callback_ops)
7879{
7880    CDBG("%s: E", __func__);
7881    QCamera3HardwareInterface *hw =
7882        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
7883    if (!hw) {
7884        ALOGE("%s: NULL camera device", __func__);
7885        return -ENODEV;
7886    }
7887
7888    int rc = hw->initialize(callback_ops);
7889    CDBG("%s: X", __func__);
7890    return rc;
7891}
7892
7893/*===========================================================================
7894 * FUNCTION   : configure_streams
7895 *
7896 * DESCRIPTION:
7897 *
7898 * PARAMETERS :
7899 *
7900 *
7901 * RETURN     : Success: 0
7902 *              Failure: -EINVAL (if stream configuration is invalid)
7903 *                       -ENODEV (fatal error)
7904 *==========================================================================*/
7905
7906int QCamera3HardwareInterface::configure_streams(
7907        const struct camera3_device *device,
7908        camera3_stream_configuration_t *stream_list)
7909{
7910    CDBG("%s: E", __func__);
7911    QCamera3HardwareInterface *hw =
7912        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
7913    if (!hw) {
7914        ALOGE("%s: NULL camera device", __func__);
7915        return -ENODEV;
7916    }
7917    int rc = hw->configureStreams(stream_list);
7918    CDBG("%s: X", __func__);
7919    return rc;
7920}
7921
7922/*===========================================================================
7923 * FUNCTION   : construct_default_request_settings
7924 *
7925 * DESCRIPTION: Configure a settings buffer to meet the required use case
7926 *
7927 * PARAMETERS :
7928 *
7929 *
7930 * RETURN     : Success: Return valid metadata
7931 *              Failure: Return NULL
7932 *==========================================================================*/
7933const camera_metadata_t* QCamera3HardwareInterface::
7934    construct_default_request_settings(const struct camera3_device *device,
7935                                        int type)
7936{
7937
7938    CDBG("%s: E", __func__);
7939    camera_metadata_t* fwk_metadata = NULL;
7940    QCamera3HardwareInterface *hw =
7941        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
7942    if (!hw) {
7943        ALOGE("%s: NULL camera device", __func__);
7944        return NULL;
7945    }
7946
7947    fwk_metadata = hw->translateCapabilityToMetadata(type);
7948
7949    CDBG("%s: X", __func__);
7950    return fwk_metadata;
7951}
7952
7953/*===========================================================================
7954 * FUNCTION   : process_capture_request
7955 *
7956 * DESCRIPTION:
7957 *
7958 * PARAMETERS :
7959 *
7960 *
7961 * RETURN     :
7962 *==========================================================================*/
7963int QCamera3HardwareInterface::process_capture_request(
7964                    const struct camera3_device *device,
7965                    camera3_capture_request_t *request)
7966{
7967    CDBG("%s: E", __func__);
7968    QCamera3HardwareInterface *hw =
7969        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
7970    if (!hw) {
7971        ALOGE("%s: NULL camera device", __func__);
7972        return -EINVAL;
7973    }
7974
7975    int rc = hw->processCaptureRequest(request);
7976    CDBG("%s: X", __func__);
7977    return rc;
7978}
7979
7980/*===========================================================================
7981 * FUNCTION   : dump
7982 *
7983 * DESCRIPTION:
7984 *
7985 * PARAMETERS :
7986 *
7987 *
7988 * RETURN     :
7989 *==========================================================================*/
7990
7991void QCamera3HardwareInterface::dump(
7992                const struct camera3_device *device, int fd)
7993{
7994    /* Log level property is read when "adb shell dumpsys media.camera" is
7995       called so that the log level can be controlled without restarting
7996       the media server */
7997    getLogLevel();
7998
7999    CDBG("%s: E", __func__);
8000    QCamera3HardwareInterface *hw =
8001        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8002    if (!hw) {
8003        ALOGE("%s: NULL camera device", __func__);
8004        return;
8005    }
8006
8007    hw->dump(fd);
8008    CDBG("%s: X", __func__);
8009    return;
8010}
8011
8012/*===========================================================================
8013 * FUNCTION   : flush
8014 *
8015 * DESCRIPTION:
8016 *
8017 * PARAMETERS :
8018 *
8019 *
8020 * RETURN     :
8021 *==========================================================================*/
8022
8023int QCamera3HardwareInterface::flush(
8024                const struct camera3_device *device)
8025{
8026    int rc;
8027    CDBG("%s: E", __func__);
8028    QCamera3HardwareInterface *hw =
8029        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8030    if (!hw) {
8031        ALOGE("%s: NULL camera device", __func__);
8032        return -EINVAL;
8033    }
8034
8035    rc = hw->flush();
8036    CDBG("%s: X", __func__);
8037    return rc;
8038}
8039
8040/*===========================================================================
8041 * FUNCTION   : close_camera_device
8042 *
8043 * DESCRIPTION:
8044 *
8045 * PARAMETERS :
8046 *
8047 *
8048 * RETURN     :
8049 *==========================================================================*/
8050int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
8051{
8052    CDBG("%s: E", __func__);
8053    int ret = NO_ERROR;
8054    QCamera3HardwareInterface *hw =
8055        reinterpret_cast<QCamera3HardwareInterface *>(
8056            reinterpret_cast<camera3_device_t *>(device)->priv);
8057    if (!hw) {
8058        ALOGE("NULL camera device");
8059        return BAD_VALUE;
8060    }
8061    delete hw;
8062
8063    CDBG("%s: X", __func__);
8064    return ret;
8065}
8066
8067/*===========================================================================
8068 * FUNCTION   : getWaveletDenoiseProcessPlate
8069 *
8070 * DESCRIPTION: query wavelet denoise process plate
8071 *
8072 * PARAMETERS : None
8073 *
 * RETURN     : WNR process plate value
8075 *==========================================================================*/
8076cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
8077{
8078    char prop[PROPERTY_VALUE_MAX];
8079    memset(prop, 0, sizeof(prop));
8080    property_get("persist.denoise.process.plates", prop, "0");
8081    int processPlate = atoi(prop);
8082    switch(processPlate) {
8083    case 0:
8084        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8085    case 1:
8086        return CAM_WAVELET_DENOISE_CBCR_ONLY;
8087    case 2:
8088        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8089    case 3:
8090        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8091    default:
8092        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8093    }
8094}
8095
8096
8097/*===========================================================================
8098 * FUNCTION   : extractSceneMode
8099 *
8100 * DESCRIPTION: Extract scene mode from frameworks set metadata
8101 *
8102 * PARAMETERS :
8103 *      @frame_settings: CameraMetadata reference
 *      @metaMode: ANDROID_CONTROL_MODE
8105 *      @hal_metadata: hal metadata structure
8106 *
8107 * RETURN     : None
8108 *==========================================================================*/
8109int32_t QCamera3HardwareInterface::extractSceneMode(
8110        const CameraMetadata &frame_settings, uint8_t metaMode,
8111        metadata_buffer_t *hal_metadata)
8112{
8113    int32_t rc = NO_ERROR;
8114
8115    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
8116        camera_metadata_ro_entry entry =
8117                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
8118        if (0 == entry.count)
8119            return rc;
8120
8121        uint8_t fwk_sceneMode = entry.data.u8[0];
8122
8123        int val = lookupHalName(SCENE_MODES_MAP,
8124                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
8125                fwk_sceneMode);
8126        if (NAME_NOT_FOUND != val) {
8127            uint8_t sceneMode = (uint8_t)val;
8128            CDBG("%s: sceneMode: %d", __func__, sceneMode);
8129            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8130                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8131                rc = BAD_VALUE;
8132            }
8133        }
8134    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
8135            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
8136        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
8137        CDBG("%s: sceneMode: %d", __func__, sceneMode);
8138        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8139                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8140            rc = BAD_VALUE;
8141        }
8142    }
8143    return rc;
8144}
8145
8146/*===========================================================================
8147 * FUNCTION   : needRotationReprocess
8148 *
8149 * DESCRIPTION: if rotation needs to be done by reprocess in pp
8150 *
8151 * PARAMETERS : none
8152 *
8153 * RETURN     : true: needed
8154 *              false: no need
8155 *==========================================================================*/
8156bool QCamera3HardwareInterface::needRotationReprocess()
8157{
8158    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
8159        // current rotation is not zero, and pp has the capability to process rotation
8160        CDBG_HIGH("%s: need do reprocess for rotation", __func__);
8161        return true;
8162    }
8163
8164    return false;
8165}
8166
8167/*===========================================================================
8168 * FUNCTION   : needReprocess
8169 *
8170 * DESCRIPTION: if reprocess in needed
8171 *
8172 * PARAMETERS : none
8173 *
8174 * RETURN     : true: needed
8175 *              false: no need
8176 *==========================================================================*/
8177bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
8178{
8179    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
8180        // TODO: add for ZSL HDR later
8181        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
8182        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
8183            CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
8184            return true;
8185        } else {
8186            CDBG_HIGH("%s: already post processed frame", __func__);
8187            return false;
8188        }
8189    }
8190    return needRotationReprocess();
8191}
8192
8193/*===========================================================================
8194 * FUNCTION   : needJpegRotation
8195 *
8196 * DESCRIPTION: if rotation from jpeg is needed
8197 *
8198 * PARAMETERS : none
8199 *
8200 * RETURN     : true: needed
8201 *              false: no need
8202 *==========================================================================*/
8203bool QCamera3HardwareInterface::needJpegRotation()
8204{
8205   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
8206    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
8207       CDBG("%s: Need Jpeg to do the rotation", __func__);
8208       return true;
8209    }
8210    return false;
8211}
8212
8213/*===========================================================================
8214 * FUNCTION   : addOfflineReprocChannel
8215 *
8216 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
8217 *              coming from input channel
8218 *
8219 * PARAMETERS :
8220 *   @config  : reprocess configuration
8221 *   @inputChHandle : pointer to the input (source) channel
8222 *
8223 *
8224 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
8225 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the reprocess channel bound to the source (input) channel.
    // NOTE(review): with exceptions enabled, plain `new` throws instead of
    // returning NULL, so this NULL check only fires on -fno-exceptions
    // builds — confirm the build flags before relying on it.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mCameraHandle->ops, NULL, config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        ALOGE("%s: no mem for reprocess channel", __func__);
        return NULL;
    }

    // Initialize before configuring streams; destroy on any failure so the
    // caller only ever sees a fully-initialized channel or NULL.
    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Enable the full HAL3 post-processing superset on the offline channel.
    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;

    // Mirror the source channel's streams onto the reprocess channel.
    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
8263
8264/*===========================================================================
8265 * FUNCTION   : getMobicatMask
8266 *
8267 * DESCRIPTION: returns mobicat mask
8268 *
8269 * PARAMETERS : none
8270 *
8271 * RETURN     : mobicat mask
8272 *
8273 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Simple accessor; the mask is written by setMobicat() based on the
    // persist.camera.mobicat property.
    return m_MobicatMask;
}
8278
8279/*===========================================================================
8280 * FUNCTION   : setMobicat
8281 *
8282 * DESCRIPTION: set Mobicat on/off.
8283 *
8284 * PARAMETERS :
8285 *   @params  : none
8286 *
8287 * RETURN     : int32_t type of status
8288 *              NO_ERROR  -- success
8289 *              none-zero failure code
8290 *==========================================================================*/
8291int32_t QCamera3HardwareInterface::setMobicat()
8292{
8293    char value [PROPERTY_VALUE_MAX];
8294    property_get("persist.camera.mobicat", value, "0");
8295    int32_t ret = NO_ERROR;
8296    uint8_t enableMobi = (uint8_t)atoi(value);
8297
8298    if (enableMobi) {
8299        tune_cmd_t tune_cmd;
8300        tune_cmd.type = SET_RELOAD_CHROMATIX;
8301        tune_cmd.module = MODULE_ALL;
8302        tune_cmd.value = TRUE;
8303        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8304                CAM_INTF_PARM_SET_VFE_COMMAND,
8305                tune_cmd);
8306
8307        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8308                CAM_INTF_PARM_SET_PP_COMMAND,
8309                tune_cmd);
8310    }
8311    m_MobicatMask = enableMobi;
8312
8313    return ret;
8314}
8315
8316/*===========================================================================
8317* FUNCTION   : getLogLevel
8318*
8319* DESCRIPTION: Reads the log level property into a variable
8320*
8321* PARAMETERS :
8322*   None
8323*
8324* RETURN     :
8325*   None
8326*==========================================================================*/
8327void QCamera3HardwareInterface::getLogLevel()
8328{
8329    char prop[PROPERTY_VALUE_MAX];
8330    uint32_t globalLogLevel = 0;
8331
8332    property_get("persist.camera.hal.debug", prop, "0");
8333    int val = atoi(prop);
8334    if (0 <= val) {
8335        gCamHal3LogLevel = (uint32_t)val;
8336    }
8337    property_get("persist.camera.global.debug", prop, "0");
8338    val = atoi(prop);
8339    if (0 <= val) {
8340        globalLogLevel = (uint32_t)val;
8341    }
8342
8343    /* Highest log level among hal.logs and global.logs is selected */
8344    if (gCamHal3LogLevel < globalLogLevel)
8345        gCamHal3LogLevel = globalLogLevel;
8346
8347    return;
8348}
8349
8350/*===========================================================================
8351 * FUNCTION   : validateStreamRotations
8352 *
8353 * DESCRIPTION: Check if the rotations requested are supported
8354 *
8355 * PARAMETERS :
8356 *   @stream_list : streams to be configured
8357 *
8358 * RETURN     : NO_ERROR on success
8359 *              -EINVAL on failure
8360 *
8361 *==========================================================================*/
8362int QCamera3HardwareInterface::validateStreamRotations(
8363        camera3_stream_configuration_t *streamList)
8364{
8365    int rc = NO_ERROR;
8366
8367    /*
8368    * Loop through all streams requested in configuration
8369    * Check if unsupported rotations have been requested on any of them
8370    */
8371    for (size_t j = 0; j < streamList->num_streams; j++){
8372        camera3_stream_t *newStream = streamList->streams[j];
8373
8374        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
8375        bool isImplDef = (newStream->format ==
8376                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
8377        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
8378                isImplDef);
8379
8380        if (isRotated && (!isImplDef || isZsl)) {
8381            ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
8382                    "type:%d and stream format:%d", __func__,
8383                    newStream->rotation, newStream->stream_type,
8384                    newStream->format);
8385            rc = -EINVAL;
8386            break;
8387        }
8388    }
8389    return rc;
8390}
8391
8392/*===========================================================================
8393* FUNCTION   : getFlashInfo
8394*
8395* DESCRIPTION: Retrieve information about whether the device has a flash.
8396*
8397* PARAMETERS :
8398*   @cameraId  : Camera id to query
8399*   @hasFlash  : Boolean indicating whether there is a flash device
8400*                associated with given camera
8401*   @flashNode : If a flash device exists, this will be its device node.
8402*
8403* RETURN     :
8404*   None
8405*==========================================================================*/
8406void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
8407        bool& hasFlash,
8408        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
8409{
8410    cam_capability_t* camCapability = gCamCapability[cameraId];
8411    if (NULL == camCapability) {
8412        hasFlash = false;
8413        flashNode[0] = '\0';
8414    } else {
8415        hasFlash = camCapability->flash_available;
8416        strlcpy(flashNode,
8417                (char*)camCapability->flash_dev_name,
8418                QCAMERA_MAX_FILEPATH_LENGTH);
8419    }
8420}
8421
8422/*===========================================================================
8423 * FUNCTION   : dynamicUpdateMetaStreamInfo
8424 *
8425 * DESCRIPTION: This function:
8426 *             (1) stops all the channels
8427 *             (2) returns error on pending requests and buffers
8428 *             (3) sends metastream_info in setparams
8429 *             (4) starts all channels
8430 *             This is useful when sensor has to be restarted to apply any
8431 *             settings such as frame rate from a different sensor mode
8432 *
8433 * PARAMETERS : None
8434 *
8435 * RETURN     : NO_ERROR on success
8436 *              Error codes on failure
8437 *
8438 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    CDBG("%s: E", __func__);

    // Step 1: stream off every channel so the sensor can be reconfigured.
    rc = stopAllChannels();
    if (rc < 0) {
        ALOGE("%s: stopAllChannels failed", __func__);
        return rc;
    }

    // Step 2: fail in-flight requests/buffers back to the framework —
    // they cannot complete across the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
        return rc;
    }

    /* Send meta stream info once again so that ISP can start */
    // Step 3: push the stream configuration down so the ISP picks up the
    // new sensor mode settings.
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Non-fatal: streaming resumes with the previous sensor mode.
        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
                __func__);
    }

    // Step 4: stream everything back on.
    rc = startAllChannels();
    if (rc < 0) {
        ALOGE("%s: startAllChannels failed", __func__);
        return rc;
    }

    CDBG("%s:%d X", __func__, __LINE__);
    return rc;
}
8478
8479/*===========================================================================
8480 * FUNCTION   : stopAllChannels
8481 *
8482 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
8483 *
8484 * PARAMETERS : None
8485 *
8486 * RETURN     : NO_ERROR on success
8487 *              Error codes on failure
8488 *
8489 *==========================================================================*/
8490int32_t QCamera3HardwareInterface::stopAllChannels()
8491{
8492    int32_t rc = NO_ERROR;
8493
8494    // Stop the Streams/Channels
8495    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8496        it != mStreamInfo.end(); it++) {
8497        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
8498        channel->stop();
8499        (*it)->status = INVALID;
8500    }
8501
8502    if (mSupportChannel) {
8503        mSupportChannel->stop();
8504    }
8505    if (mAnalysisChannel) {
8506        mAnalysisChannel->stop();
8507    }
8508    if (mRawDumpChannel) {
8509        mRawDumpChannel->stop();
8510    }
8511    if (mMetadataChannel) {
8512        /* If content of mStreamInfo is not 0, there is metadata stream */
8513        mMetadataChannel->stop();
8514    }
8515
8516    CDBG("%s:%d All channels stopped", __func__, __LINE__);
8517    return rc;
8518}
8519
8520/*===========================================================================
8521 * FUNCTION   : startAllChannels
8522 *
8523 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
8524 *
8525 * PARAMETERS : None
8526 *
8527 * RETURN     : NO_ERROR on success
8528 *              Error codes on failure
8529 *
8530 *==========================================================================*/
8531int32_t QCamera3HardwareInterface::startAllChannels()
8532{
8533    int32_t rc = NO_ERROR;
8534
8535    CDBG("%s: Start all channels ", __func__);
8536    // Start the Streams/Channels
8537    if (mMetadataChannel) {
8538        /* If content of mStreamInfo is not 0, there is metadata stream */
8539        rc = mMetadataChannel->start();
8540        if (rc < 0) {
8541            ALOGE("%s: META channel start failed", __func__);
8542            return rc;
8543        }
8544    }
8545    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8546        it != mStreamInfo.end(); it++) {
8547        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
8548        rc = channel->start();
8549        if (rc < 0) {
8550            ALOGE("%s: channel start failed", __func__);
8551            return rc;
8552        }
8553    }
8554    if (mAnalysisChannel) {
8555        mAnalysisChannel->start();
8556    }
8557    if (mSupportChannel) {
8558        rc = mSupportChannel->start();
8559        if (rc < 0) {
8560            ALOGE("%s: Support channel start failed", __func__);
8561            return rc;
8562        }
8563    }
8564    if (mRawDumpChannel) {
8565        rc = mRawDumpChannel->start();
8566        if (rc < 0) {
8567            ALOGE("%s: RAW dump channel start failed", __func__);
8568            return rc;
8569        }
8570    }
8571
8572    CDBG("%s:%d All channels started", __func__, __LINE__);
8573    return rc;
8574}
8575
8576/*===========================================================================
8577 * FUNCTION   : notifyErrorForPendingRequests
8578 *
8579 * DESCRIPTION: This function sends error for all the pending requests/buffers
8580 *
8581 * PARAMETERS : None
8582 *
8583 * RETURN     : Error codes
8584 *              NO_ERROR on success
8585 *
8586 *==========================================================================*/
8587int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
8588{
8589    int32_t rc = NO_ERROR;
8590    unsigned int frameNum = 0;
8591    camera3_capture_result_t result;
8592    camera3_stream_buffer_t *pStream_Buf = NULL;
8593    FlushMap flushMap;
8594
8595    memset(&result, 0, sizeof(camera3_capture_result_t));
8596
8597    pendingRequestIterator i = mPendingRequestsList.begin();
8598    frameNum = i->frame_number;
8599    CDBG_HIGH("%s: Oldest frame num on  mPendingRequestsList = %d",
8600      __func__, frameNum);
8601
8602    // Go through the pending buffers and group them depending
8603    // on frame number
8604    for (List<PendingBufferInfo>::iterator k =
8605            mPendingBuffersMap.mPendingBufferList.begin();
8606            k != mPendingBuffersMap.mPendingBufferList.end();) {
8607
8608        if (k->frame_number < frameNum) {
8609            ssize_t idx = flushMap.indexOfKey(k->frame_number);
8610            if (idx == NAME_NOT_FOUND) {
8611                Vector<PendingBufferInfo> pending;
8612                pending.add(*k);
8613                flushMap.add(k->frame_number, pending);
8614            } else {
8615                Vector<PendingBufferInfo> &pending =
8616                        flushMap.editValueFor(k->frame_number);
8617                pending.add(*k);
8618            }
8619
8620            mPendingBuffersMap.num_buffers--;
8621            k = mPendingBuffersMap.mPendingBufferList.erase(k);
8622        } else {
8623            k++;
8624        }
8625    }
8626
8627    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
8628        uint32_t frame_number = flushMap.keyAt(iFlush);
8629        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
8630
8631        // Send Error notify to frameworks for each buffer for which
8632        // metadata buffer is already sent
8633        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
8634          __func__, frame_number, pending.size());
8635
8636        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
8637        if (NULL == pStream_Buf) {
8638            ALOGE("%s: No memory for pending buffers array", __func__);
8639            return NO_MEMORY;
8640        }
8641        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
8642
8643        for (size_t j = 0; j < pending.size(); j++) {
8644            const PendingBufferInfo &info = pending.itemAt(j);
8645            camera3_notify_msg_t notify_msg;
8646            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
8647            notify_msg.type = CAMERA3_MSG_ERROR;
8648            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
8649            notify_msg.message.error.error_stream = info.stream;
8650            notify_msg.message.error.frame_number = frame_number;
8651            pStream_Buf[j].acquire_fence = -1;
8652            pStream_Buf[j].release_fence = -1;
8653            pStream_Buf[j].buffer = info.buffer;
8654            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
8655            pStream_Buf[j].stream = info.stream;
8656            mCallbackOps->notify(mCallbackOps, &notify_msg);
8657            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
8658                    frame_number, info.stream);
8659        }
8660
8661        result.result = NULL;
8662        result.frame_number = frame_number;
8663        result.num_output_buffers = (uint32_t)pending.size();
8664        result.output_buffers = pStream_Buf;
8665        mCallbackOps->process_capture_result(mCallbackOps, &result);
8666
8667        delete [] pStream_Buf;
8668    }
8669
8670    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);
8671
8672    flushMap.clear();
8673    for (List<PendingBufferInfo>::iterator k =
8674            mPendingBuffersMap.mPendingBufferList.begin();
8675            k != mPendingBuffersMap.mPendingBufferList.end();) {
8676        ssize_t idx = flushMap.indexOfKey(k->frame_number);
8677        if (idx == NAME_NOT_FOUND) {
8678            Vector<PendingBufferInfo> pending;
8679            pending.add(*k);
8680            flushMap.add(k->frame_number, pending);
8681        } else {
8682            Vector<PendingBufferInfo> &pending =
8683                    flushMap.editValueFor(k->frame_number);
8684            pending.add(*k);
8685        }
8686
8687        mPendingBuffersMap.num_buffers--;
8688        k = mPendingBuffersMap.mPendingBufferList.erase(k);
8689    }
8690
8691    // Go through the pending requests info and send error request to framework
8692    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
8693        uint32_t frame_number = flushMap.keyAt(iFlush);
8694        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
8695        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
8696              __func__, frame_number);
8697
8698        // Send shutter notify to frameworks
8699        camera3_notify_msg_t notify_msg;
8700        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
8701        notify_msg.type = CAMERA3_MSG_ERROR;
8702        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
8703        notify_msg.message.error.error_stream = NULL;
8704        notify_msg.message.error.frame_number = frame_number;
8705        mCallbackOps->notify(mCallbackOps, &notify_msg);
8706
8707        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
8708        if (NULL == pStream_Buf) {
8709            ALOGE("%s: No memory for pending buffers array", __func__);
8710            return NO_MEMORY;
8711        }
8712        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
8713
8714        for (size_t j = 0; j < pending.size(); j++) {
8715            const PendingBufferInfo &info = pending.itemAt(j);
8716            pStream_Buf[j].acquire_fence = -1;
8717            pStream_Buf[j].release_fence = -1;
8718            pStream_Buf[j].buffer = info.buffer;
8719            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
8720            pStream_Buf[j].stream = info.stream;
8721        }
8722
8723        result.num_output_buffers = (uint32_t)pending.size();
8724        result.output_buffers = pStream_Buf;
8725        result.result = NULL;
8726        result.frame_number = frame_number;
8727        mCallbackOps->process_capture_result(mCallbackOps, &result);
8728        delete [] pStream_Buf;
8729    }
8730
8731    /* Reset pending buffer list and requests list */
8732    for (pendingRequestIterator i = mPendingRequestsList.begin();
8733            i != mPendingRequestsList.end();) {
8734        i = erasePendingRequest(i);
8735    }
8736    /* Reset pending frame Drop list and requests list */
8737    mPendingFrameDropList.clear();
8738
8739    flushMap.clear();
8740    mPendingBuffersMap.num_buffers = 0;
8741    mPendingBuffersMap.mPendingBufferList.clear();
8742    mPendingReprocessResultList.clear();
8743    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);
8744
8745    return rc;
8746}
8747
8748bool QCamera3HardwareInterface::isOnEncoder(
8749        const cam_dimension_t max_viewfinder_size,
8750        uint32_t width, uint32_t height)
8751{
8752    return (width > (uint32_t)max_viewfinder_size.width ||
8753            height > (uint32_t)max_viewfinder_size.height);
8754}
8755
8756}; //end namespace qcamera
8757