// QCamera3HWI.cpp revision c622c1d72f9e5d5bca91af4c068e1ba0b8d09d90
1/* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34#include <cutils/properties.h>
35#include <hardware/camera3.h>
36#include <camera/CameraMetadata.h>
37#include <stdlib.h>
38#include <fcntl.h>
39#include <stdint.h>
40#include <utils/Log.h>
41#include <utils/Errors.h>
42#include <ui/Fence.h>
43#include <gralloc_priv.h>
44#include "QCamera3HWI.h"
45#include "QCamera3Mem.h"
46#include "QCamera3Channel.h"
47#include "QCamera3PostProc.h"
48#include "QCamera3VendorTags.h"
49
50using namespace android;
51
52namespace qcamera {
53
/* Larger of two values. NOTE: both arguments may be evaluated twice. */
#define MAX(a, b) ((a) > (b) ? (a) : (b))

/* Shorthand for the mapped data pointer of buffer INDEX in a memory object. */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

/* Per-camera capability table, indexed by camera id (read in the
 * constructor; presumably populated by the module before a HWI instance
 * is constructed — TODO confirm against the module init path). */
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
/* Cached static metadata per camera id. */
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

/* Serializes session open/close across all HWI instances; together with
 * mCameraSessionActive it enforces a single active camera session
 * (see openCamera(struct hw_device_t **)). */
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
64
/* Android ANDROID_CONTROL_EFFECT_MODE_* <-> HAL CAM_EFFECT_MODE_* mapping.
 * Entry order matters: HAL->Android translation takes the first match. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
76
/* Android ANDROID_CONTROL_AWB_MODE_* <-> HAL CAM_WB_MODE_* mapping. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
88
/* Android ANDROID_CONTROL_SCENE_MODE_* <-> HAL CAM_SCENE_MODE_* mapping.
 * FACE_PRIORITY maps to CAM_SCENE_MODE_OFF — there is no HAL scene mode for
 * it (face handling is presumably done elsewhere — TODO confirm). */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
107
/* Android ANDROID_CONTROL_AF_MODE_* <-> HAL CAM_FOCUS_MODE_* mapping.
 * AF_MODE_OFF appears twice on purpose: both the OFF and FIXED HAL focus
 * modes are reported back to the framework as AF_MODE_OFF. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
117
/* Android ANDROID_CONTROL_AE_ANTIBANDING_MODE_* <-> HAL CAM_ANTIBANDING_MODE_* mapping. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
124
/* Android AE mode -> HAL flash mode mapping. Many-to-one: OFF and ON both
 * mean "no flash", and AUTO_FLASH_REDEYE falls back to AUTO (no dedicated
 * HAL red-eye mode is listed here). */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
132
/* Android ANDROID_FLASH_MODE_* <-> HAL CAM_FLASH_MODE_* mapping. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
138
/* Android face-detect mode <-> HAL mapping. SIMPLE is not listed, so only
 * OFF and FULL are supported by this table. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
143
/* Android lens focus-distance calibration quality <-> HAL mapping. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
152
/* Supported JPEG thumbnail sizes as a flattened list of (width, height)
 * pairs; the leading (0, 0) entry means "no thumbnail". */
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};
160
/* Android sensor test-pattern mode <-> HAL mapping. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
168
169/* Since there is no mapping for all the options some Android enum are not listed.
170 * Also, the order in this list is important because while mapping from HAL to Android it will
171 * traverse from lower to higher index which means that for HAL values that are map to different
172 * Android values, the traverse logic will select the first one found.
173 */
/* Android reference illuminant <-> HAL AWB illuminant mapping; many-to-one
 * in both directions (see the ordering note above — first match wins). */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
192
193/* Custom tag definitions */
194
/* Dispatch table exposed to the framework through camera3_device_t::ops.
 * Entries set to NULL (register_stream_buffers, get_metadata_vendor_tag_ops)
 * are not implemented at the device version this HAL reports. Uses GNU
 * "label:" designated initializers; field order must match the struct. */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
206
/* Presumably the cap on simultaneously in-flight capture requests —
 * TODO(review): confirm against process_capture_request usage. */
int QCamera3HardwareInterface::kMaxInFlight = 5;
208
209/*===========================================================================
210 * FUNCTION   : QCamera3HardwareInterface
211 *
212 * DESCRIPTION: constructor of QCamera3HardwareInterface
213 *
214 * PARAMETERS :
215 *   @cameraId  : camera ID
216 *
217 * RETURN     : none
218 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId,
                        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mLoopBackResult(NULL),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mHdrHint(false),
      mMetaFrameCount(0),
      mCallbacks(callbacks)
{
    // Wire up the camera3_device_t handed back to the framework on open.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    // priv lets the static ops entry points recover 'this' from the device.
    mCameraDevice.priv = this;
    // NOTE(review): assumes gCamCapability[cameraId] is already populated
    // before construction — a NULL entry would crash here; confirm caller.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;     // no capture requests outstanding yet
    mCurrentRequestId = -1;  // -1 == no request id received yet
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built later on demand
    // (see construct_default_request_settings in mCameraOps).
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Optional power HAL module used for performance hints; absence is
    // logged but non-fatal (m_pPowerModule stays NULL).
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
267
268/*===========================================================================
269 * FUNCTION   : ~QCamera3HardwareInterface
270 *
271 * DESCRIPTION: destructor of QCamera3HardwareInterface
272 *
273 * PARAMETERS : none
274 *
275 * RETURN     : none
276 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework

    // Pass 1: stop every channel before any is deleted, so no channel is
    // still streaming while its peers are being torn down.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    // Pass 2: delete the channels and free the stream_info_t records
    // (malloc'ed in configureStreams).
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    // mPictureChannel was owned through mStreamInfo and is already deleted
    // above; just drop the alias.
    mPictureChannel = NULL;

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (mMetadataChannel) {
            mMetadataChannel->stop();
            delete mMetadataChannel;
            mMetadataChannel = NULL;
        }
        // Release parameter heap/state allocated by initParameters().
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingRequestsList.clear();

    // Free any lazily-built default request templates.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}
333
334/*===========================================================================
335 * FUNCTION   : openCamera
336 *
337 * DESCRIPTION: open camera
338 *
339 * PARAMETERS :
340 *   @hw_device  : double ptr for camera device struct
341 *
342 * RETURN     : int32_t type of status
343 *              NO_ERROR  -- success
344 *              none-zero failure code
345 *==========================================================================*/
346int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
347{
348    int rc = 0;
349    pthread_mutex_lock(&mCameraSessionLock);
350    if (mCameraSessionActive) {
351        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
352        pthread_mutex_unlock(&mCameraSessionLock);
353        return -EUSERS;
354    }
355
356    if (mCameraOpened) {
357        *hw_device = NULL;
358        return PERMISSION_DENIED;
359    }
360
361    rc = openCamera();
362    if (rc == 0) {
363        *hw_device = &mCameraDevice.common;
364        mCameraSessionActive = 1;
365    } else
366        *hw_device = NULL;
367
368#ifdef HAS_MULTIMEDIA_HINTS
369    if (rc == 0) {
370        if (m_pPowerModule) {
371            if (m_pPowerModule->powerHint) {
372                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
373                        (void *)"state=1");
374            }
375        }
376    }
377#endif
378    pthread_mutex_unlock(&mCameraSessionLock);
379    return rc;
380}
381
382/*===========================================================================
383 * FUNCTION   : openCamera
384 *
385 * DESCRIPTION: open camera
386 *
387 * PARAMETERS : none
388 *
389 * RETURN     : int32_t type of status
390 *              NO_ERROR  -- success
391 *              none-zero failure code
392 *==========================================================================*/
393int QCamera3HardwareInterface::openCamera()
394{
395    if (mCameraHandle) {
396        ALOGE("Failure: Camera already opened");
397        return ALREADY_EXISTS;
398    }
399    mCameraHandle = camera_open(mCameraId);
400    if (!mCameraHandle) {
401        ALOGE("camera_open failed.");
402        return UNKNOWN_ERROR;
403    }
404
405    mCameraOpened = true;
406
407    return NO_ERROR;
408}
409
410/*===========================================================================
411 * FUNCTION   : closeCamera
412 *
413 * DESCRIPTION: close camera
414 *
415 * PARAMETERS : none
416 *
417 * RETURN     : int32_t type of status
418 *              NO_ERROR  -- success
419 *              none-zero failure code
420 *==========================================================================*/
421int QCamera3HardwareInterface::closeCamera()
422{
423    int rc = NO_ERROR;
424
425    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
426    mCameraHandle = NULL;
427    mCameraOpened = false;
428
429#ifdef HAS_MULTIMEDIA_HINTS
430    if (rc == NO_ERROR) {
431        if (m_pPowerModule) {
432            if (m_pPowerModule->powerHint) {
433                if(mHdrHint == true) {
434                    m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
435                            (void *)"state=3");
436                    mHdrHint = false;
437                }
438                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
439                        (void *)"state=0");
440            }
441        }
442    }
443#endif
444
445    return rc;
446}
447
448/*===========================================================================
449 * FUNCTION   : initialize
450 *
451 * DESCRIPTION: Initialize frameworks callback functions
452 *
453 * PARAMETERS :
454 *   @callback_ops : callback function to frameworks
455 *
456 * RETURN     :
457 *
458 *==========================================================================*/
459int QCamera3HardwareInterface::initialize(
460        const struct camera3_callback_ops *callback_ops)
461{
462    int rc;
463
464    pthread_mutex_lock(&mMutex);
465
466    rc = initParameters();
467    if (rc < 0) {
468        ALOGE("%s: initParamters failed %d", __func__, rc);
469       goto err1;
470    }
471    mCallbackOps = callback_ops;
472
473    pthread_mutex_unlock(&mMutex);
474    mCameraInitialized = true;
475    return 0;
476
477err1:
478    pthread_mutex_unlock(&mMutex);
479    return rc;
480}
481
482/*===========================================================================
483 * FUNCTION   : configureStreams
484 *
485 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
486 *              and output streams.
487 *
488 * PARAMETERS :
489 *   @stream_list : streams to be configured
490 *
491 * RETURN     :
492 *
493 *==========================================================================*/
494int QCamera3HardwareInterface::configureStreams(
495        camera3_stream_configuration_t *streamList)
496{
497    int rc = 0;
498
499    // Sanity check stream_list
500    if (streamList == NULL) {
501        ALOGE("%s: NULL stream configuration", __func__);
502        return BAD_VALUE;
503    }
504    if (streamList->streams == NULL) {
505        ALOGE("%s: NULL stream list", __func__);
506        return BAD_VALUE;
507    }
508
509    if (streamList->num_streams < 1) {
510        ALOGE("%s: Bad number of streams requested: %d", __func__,
511                streamList->num_streams);
512        return BAD_VALUE;
513    }
514
515    /* first invalidate all the steams in the mStreamList
516     * if they appear again, they will be validated */
517    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
518            it != mStreamInfo.end(); it++) {
519        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
520        channel->stop();
521        (*it)->status = INVALID;
522    }
523    if (mMetadataChannel) {
524        /* If content of mStreamInfo is not 0, there is metadata stream */
525        mMetadataChannel->stop();
526    }
527
528#ifdef HAS_MULTIMEDIA_HINTS
529    if(mHdrHint == true) {
530        if (m_pPowerModule) {
531            if (m_pPowerModule->powerHint) {
532                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
533                        (void *)"state=3");
534                mHdrHint = false;
535            }
536        }
537    }
538#endif
539
540    pthread_mutex_lock(&mMutex);
541
542    bool isZsl = false;
543    camera3_stream_t *inputStream = NULL;
544    camera3_stream_t *jpegStream = NULL;
545    cam_stream_size_info_t stream_config_info;
546
547    for (size_t i = 0; i < streamList->num_streams; i++) {
548        camera3_stream_t *newStream = streamList->streams[i];
549        ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
550                __func__, newStream->stream_type, newStream->format,
551                 newStream->width, newStream->height);
552        //if the stream is in the mStreamList validate it
553        bool stream_exists = false;
554        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
555                it != mStreamInfo.end(); it++) {
556            if ((*it)->stream == newStream) {
557                QCamera3Channel *channel =
558                    (QCamera3Channel*)(*it)->stream->priv;
559                stream_exists = true;
560                delete channel;
561                (*it)->status = VALID;
562                (*it)->stream->priv = NULL;
563                (*it)->channel = NULL;
564            }
565        }
566        if (!stream_exists) {
567            //new stream
568            stream_info_t* stream_info;
569            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
570            stream_info->stream = newStream;
571            stream_info->status = VALID;
572            stream_info->channel = NULL;
573            mStreamInfo.push_back(stream_info);
574        }
575        if (newStream->stream_type == CAMERA3_STREAM_INPUT
576                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
577            if (inputStream != NULL) {
578                ALOGE("%s: Multiple input streams requested!", __func__);
579                pthread_mutex_unlock(&mMutex);
580                return BAD_VALUE;
581            }
582            inputStream = newStream;
583        }
584        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
585            jpegStream = newStream;
586        }
587    }
588    mInputStream = inputStream;
589
590    cleanAndSortStreamInfo();
591    if (mMetadataChannel) {
592        delete mMetadataChannel;
593        mMetadataChannel = NULL;
594    }
595
596    //Create metadata channel and initialize it
597    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
598                    mCameraHandle->ops, captureResultCb,
599                    &gCamCapability[mCameraId]->padding_info, this);
600    if (mMetadataChannel == NULL) {
601        ALOGE("%s: failed to allocate metadata channel", __func__);
602        rc = -ENOMEM;
603        pthread_mutex_unlock(&mMutex);
604        return rc;
605    }
606    rc = mMetadataChannel->initialize();
607    if (rc < 0) {
608        ALOGE("%s: metadata channel initialization failed", __func__);
609        delete mMetadataChannel;
610        mMetadataChannel = NULL;
611        pthread_mutex_unlock(&mMutex);
612        return rc;
613    }
614
615    /* Create dummy stream if there is one single raw stream */
616    if (streamList->num_streams == 1 &&
617            (streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
618            streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW16)) {
619        mSupportChannel = new QCamera3SupportChannel(
620                mCameraHandle->camera_handle,
621                mCameraHandle->ops,
622                &gCamCapability[mCameraId]->padding_info,
623                this);
624        if (!mSupportChannel) {
625            ALOGE("%s: dummy channel cannot be created", __func__);
626            pthread_mutex_unlock(&mMutex);
627            return -ENOMEM;
628        }
629
630        rc = mSupportChannel->initialize();
631        if (rc < 0) {
632            ALOGE("%s: dummy channel initialization failed", __func__);
633            delete mSupportChannel;
634            mSupportChannel = NULL;
635            delete mMetadataChannel;
636            mMetadataChannel = NULL;
637            pthread_mutex_unlock(&mMutex);
638            return rc;
639        }
640    }
641
642    /* Allocate channel objects for the requested streams */
643    for (size_t i = 0; i < streamList->num_streams; i++) {
644        camera3_stream_t *newStream = streamList->streams[i];
645        uint32_t stream_usage = newStream->usage;
646        stream_config_info.stream_sizes[i].width = newStream->width;
647        stream_config_info.stream_sizes[i].height = newStream->height;
648        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
649            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
650            //for zsl stream the size is active array size
651            isZsl = true;
652            stream_config_info.stream_sizes[i].width =
653                    gCamCapability[mCameraId]->active_array_size.width;
654            stream_config_info.stream_sizes[i].height =
655                    gCamCapability[mCameraId]->active_array_size.height;
656            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
657        } else {
658           //for non zsl streams find out the format
659           switch (newStream->format) {
660           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
661              {
662                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
663                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
664                 } else {
665                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
666                 }
667              }
668              break;
669           case HAL_PIXEL_FORMAT_YCbCr_420_888:
670              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
671#ifdef HAS_MULTIMEDIA_HINTS
672              if (m_pPowerModule) {
673                  if (m_pPowerModule->powerHint) {
674                      m_pPowerModule->powerHint(m_pPowerModule,
675                          POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
676                      mHdrHint = true;
677                  }
678              }
679#endif
680              break;
681           case HAL_PIXEL_FORMAT_BLOB:
682              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
683              break;
684           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
685           case HAL_PIXEL_FORMAT_RAW16:
686              stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
687              break;
688           default:
689              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
690              break;
691           }
692        }
693        if (newStream->priv == NULL) {
694            //New stream, construct channel
695            switch (newStream->stream_type) {
696            case CAMERA3_STREAM_INPUT:
697                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
698                break;
699            case CAMERA3_STREAM_BIDIRECTIONAL:
700                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
701                    GRALLOC_USAGE_HW_CAMERA_WRITE;
702                break;
703            case CAMERA3_STREAM_OUTPUT:
704                /* For video encoding stream, set read/write rarely
705                 * flag so that they may be set to un-cached */
706                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
707                    newStream->usage =
708                         (GRALLOC_USAGE_SW_READ_RARELY |
709                         GRALLOC_USAGE_SW_WRITE_RARELY |
710                         GRALLOC_USAGE_HW_CAMERA_WRITE);
711                else
712                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
713                break;
714            default:
715                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
716                break;
717            }
718
719            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
720                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
721                QCamera3Channel *channel = NULL;
722                switch (newStream->format) {
723                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
724                case HAL_PIXEL_FORMAT_YCbCr_420_888:
725                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
726                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
727                            mCameraHandle->ops, captureResultCb,
728                            &gCamCapability[mCameraId]->padding_info,
729                            this,
730                            newStream,
731                            (cam_stream_type_t) stream_config_info.type[i]);
732                    if (channel == NULL) {
733                        ALOGE("%s: allocation of channel failed", __func__);
734                        pthread_mutex_unlock(&mMutex);
735                        return -ENOMEM;
736                    }
737
738                    newStream->priv = channel;
739                    break;
740                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
741                case HAL_PIXEL_FORMAT_RAW16:
742                    newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
743                    mRawChannel = new QCamera3RawChannel(
744                            mCameraHandle->camera_handle,
745                            mCameraHandle->ops, captureResultCb,
746                            &gCamCapability[mCameraId]->padding_info,
747                            this, newStream, (newStream->format == HAL_PIXEL_FORMAT_RAW16));
748                    if (mRawChannel == NULL) {
749                        ALOGE("%s: allocation of raw channel failed", __func__);
750                        pthread_mutex_unlock(&mMutex);
751                        return -ENOMEM;
752                    }
753
754                    newStream->priv = (QCamera3Channel*)mRawChannel;
755                    break;
756                case HAL_PIXEL_FORMAT_BLOB:
757                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
758                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
759                            mCameraHandle->ops, captureResultCb,
760                            &gCamCapability[mCameraId]->padding_info, this, newStream);
761                    if (mPictureChannel == NULL) {
762                        ALOGE("%s: allocation of channel failed", __func__);
763                        pthread_mutex_unlock(&mMutex);
764                        return -ENOMEM;
765                    }
766                    newStream->priv = (QCamera3Channel*)mPictureChannel;
767                    break;
768
769                default:
770                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
771                    break;
772                }
773            }
774
775            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
776                    it != mStreamInfo.end(); it++) {
777                if ((*it)->stream == newStream) {
778                    (*it)->channel = (QCamera3Channel*) newStream->priv;
779                    break;
780                }
781            }
782        } else {
783            // Channel already exists for this stream
784            // Do nothing for now
785        }
786    }
787
788    if (isZsl)
789        mPictureChannel->overrideYuvSize(
790                gCamCapability[mCameraId]->active_array_size.width,
791                gCamCapability[mCameraId]->active_array_size.height);
792
793    int32_t hal_version = CAM_HAL_V3;
794    stream_config_info.num_streams = streamList->num_streams;
795    if (mSupportChannel) {
796        stream_config_info.stream_sizes[stream_config_info.num_streams] =
797                QCamera3SupportChannel::kDim;
798        stream_config_info.type[stream_config_info.num_streams] =
799                CAM_STREAM_TYPE_CALLBACK;
800        stream_config_info.num_streams++;
801    }
802
803    // settings/parameters don't carry over for new configureStreams
804    memset(mParameters, 0, sizeof(metadata_buffer_t));
805
806    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
807    AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
808                sizeof(hal_version), &hal_version);
809
810    AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
811                sizeof(stream_config_info), &stream_config_info);
812
813    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
814
815    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
816    mPendingRequestsList.clear();
817    mPendingFrameDropList.clear();
818    // Initialize/Reset the pending buffers list
819    mPendingBuffersMap.num_buffers = 0;
820    mPendingBuffersMap.mPendingBufferList.clear();
821
822    mFirstRequest = true;
823
824    //Get min frame duration for this streams configuration
825    deriveMinFrameDuration();
826
827    pthread_mutex_unlock(&mMutex);
828    return rc;
829}
830
831/*===========================================================================
832 * FUNCTION   : validateCaptureRequest
833 *
834 * DESCRIPTION: validate a capture request from camera service
835 *
836 * PARAMETERS :
837 *   @request : request from framework to process
838 *
839 * RETURN     :
840 *
841 *==========================================================================*/
842int QCamera3HardwareInterface::validateCaptureRequest(
843                    camera3_capture_request_t *request)
844{
845    ssize_t idx = 0;
846    const camera3_stream_buffer_t *b;
847    CameraMetadata meta;
848
849    /* Sanity check the request */
850    if (request == NULL) {
851        ALOGE("%s: NULL capture request", __func__);
852        return BAD_VALUE;
853    }
854
855    if (request->settings == NULL && mFirstRequest) {
856        /*settings cannot be null for the first request*/
857        return BAD_VALUE;
858    }
859
860    uint32_t frameNumber = request->frame_number;
861    if (request->input_buffer != NULL &&
862            request->input_buffer->stream != mInputStream) {
863        ALOGE("%s: Request %d: Input buffer not from input stream!",
864                __FUNCTION__, frameNumber);
865        return BAD_VALUE;
866    }
867    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
868        ALOGE("%s: Request %d: No output buffers provided!",
869                __FUNCTION__, frameNumber);
870        return BAD_VALUE;
871    }
872    if (request->input_buffer != NULL) {
873        b = request->input_buffer;
874        QCamera3Channel *channel =
875            static_cast<QCamera3Channel*>(b->stream->priv);
876        if (channel == NULL) {
877            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
878                    __func__, frameNumber, idx);
879            return BAD_VALUE;
880        }
881        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
882            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
883                    __func__, frameNumber, idx);
884            return BAD_VALUE;
885        }
886        if (b->release_fence != -1) {
887            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
888                    __func__, frameNumber, idx);
889            return BAD_VALUE;
890        }
891        if (b->buffer == NULL) {
892            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
893                    __func__, frameNumber, idx);
894            return BAD_VALUE;
895        }
896    }
897
898    // Validate all buffers
899    b = request->output_buffers;
900    do {
901        QCamera3Channel *channel =
902                static_cast<QCamera3Channel*>(b->stream->priv);
903        if (channel == NULL) {
904            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
905                    __func__, frameNumber, idx);
906            return BAD_VALUE;
907        }
908        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
909            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
910                    __func__, frameNumber, idx);
911            return BAD_VALUE;
912        }
913        if (b->release_fence != -1) {
914            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
915                    __func__, frameNumber, idx);
916            return BAD_VALUE;
917        }
918        if (b->buffer == NULL) {
919            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
920                    __func__, frameNumber, idx);
921            return BAD_VALUE;
922        }
923        idx++;
924        b = request->output_buffers + idx;
925    } while (idx < (ssize_t)request->num_output_buffers);
926
927    return NO_ERROR;
928}
929
930/*===========================================================================
931 * FUNCTION   : deriveMinFrameDuration
932 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
934 *              on currently configured streams.
935 *
936 * PARAMETERS : NONE
937 *
938 * RETURN     : NONE
939 *
940 *==========================================================================*/
941void QCamera3HardwareInterface::deriveMinFrameDuration()
942{
943    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
944
945    maxJpegDim = 0;
946    maxProcessedDim = 0;
947    maxRawDim = 0;
948
949    // Figure out maximum jpeg, processed, and raw dimensions
950    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
951        it != mStreamInfo.end(); it++) {
952
953        // Input stream doesn't have valid stream_type
954        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
955            continue;
956
957        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
958        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
959            if (dimension > maxJpegDim)
960                maxJpegDim = dimension;
961        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
962                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
963            if (dimension > maxRawDim)
964                maxRawDim = dimension;
965        } else {
966            if (dimension > maxProcessedDim)
967                maxProcessedDim = dimension;
968        }
969    }
970
971    //Assume all jpeg dimensions are in processed dimensions.
972    if (maxJpegDim > maxProcessedDim)
973        maxProcessedDim = maxJpegDim;
974    //Find the smallest raw dimension that is greater or equal to jpeg dimension
975    if (maxProcessedDim > maxRawDim) {
976        maxRawDim = INT32_MAX;
977        for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
978            i++) {
979
980            int32_t dimension =
981                gCamCapability[mCameraId]->raw_dim[i].width *
982                gCamCapability[mCameraId]->raw_dim[i].height;
983
984            if (dimension >= maxProcessedDim && dimension < maxRawDim)
985                maxRawDim = dimension;
986        }
987    }
988
989    //Find minimum durations for processed, jpeg, and raw
990    for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
991            i++) {
992        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
993                gCamCapability[mCameraId]->raw_dim[i].height) {
994            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
995            break;
996        }
997    }
998    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
999        if (maxProcessedDim ==
1000            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
1001            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
1002            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1003            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1004            break;
1005        }
1006    }
1007}
1008
1009/*===========================================================================
1010 * FUNCTION   : getMinFrameDuration
1011 *
 * DESCRIPTION: get the minimum frame duration based on the currently derived
 *              per-category minimum frame durations and the request configuration.
1014 *
 * PARAMETERS : @request: request sent by the frameworks
1016 *
 * RETURN     : min frame duration for a particular request
1018 *
1019 *==========================================================================*/
1020int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
1021{
1022    bool hasJpegStream = false;
1023    bool hasRawStream = false;
1024    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
1025        const camera3_stream_t *stream = request->output_buffers[i].stream;
1026        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
1027            hasJpegStream = true;
1028        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1029                stream->format == HAL_PIXEL_FORMAT_RAW16)
1030            hasRawStream = true;
1031    }
1032
1033    if (!hasJpegStream)
1034        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
1035    else
1036        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
1037}
1038
1039/*===========================================================================
1040 * FUNCTION   : handleMetadataWithLock
1041 *
1042 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
1043 *
1044 * PARAMETERS : @metadata_buf: metadata buffer
1045 *
1046 * RETURN     :
1047 *
1048 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf)
{
    // Raw metadata payload from the backend; individual bookkeeping fields
    // are read out of it below via POINTER_OF offsets.
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
        CAM_INTF_META_PENDING_REQUESTS, metadata);
    uint32_t frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
    const struct timeval *tv = (const struct timeval *)
        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    // Sensor timestamp converted from struct timeval to nanoseconds.
    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
        tv->tv_usec * NSEC_PER_USEC;
    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);

    int32_t urgent_frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t urgent_frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if (urgent_frame_number_valid) {
        ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using HAL3.1 quirk for partial results
        for (List<PendingRequestInfo>::iterator i =
            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // Older pending requests that were never notified get a shutter
            // with an estimated timestamp: one 33ms frame interval subtracted
            // per frame they lag behind the urgent frame.
            if (i->frame_number < urgent_frame_number &&
                i->bNotified == 0) {
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time -
                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                i->timestamp = notify_msg.message.shutter.timestamp;
                i->bNotified = 1;
                ALOGV("%s: Support notification !!!! notify frame_number = %d, capture_time = %lld",
                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
            }

            // Exact match: send the real shutter plus an early (partial)
            // result carrying only the urgent 3A metadata.
            if (i->frame_number == urgent_frame_number) {

                camera3_capture_result_t result;

                // Send shutter notify to frameworks
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time;
                mCallbackOps->notify(mCallbackOps, &notify_msg);

                i->timestamp = capture_time;
                i->bNotified = 1;

                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // A metadata buffer without a valid frame number is only a start-of-frame
    // marker: recycle it to the metadata channel and skip result delivery.
    if (!frame_number_valid) {
        ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        goto done_metadata;
    }
    ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
            frame_number, capture_time);

    // Go through the pending requests info and send shutter/results to frameworks
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        camera3_capture_result_t result;
        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

        // Flush out all entries with less or equal frame numbers.
        mPendingRequest--;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then notify ERROR_BUFFER for the corresponding stream and
        // buffer with CAMERA3_BUFFER_STATUS_ERROR
        if (cam_frame_drop.frame_dropped) {
            camera3_notify_msg_t notify_msg;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k=0; k<cam_frame_drop.cam_stream_ID.num_streams; k++) {
                  if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
                      // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                      ALOGV("%s: Start of reporting error frame#=%d, streamID=%d",
                             __func__, i->frame_number, streamID);
                      notify_msg.type = CAMERA3_MSG_ERROR;
                      notify_msg.message.error.frame_number = i->frame_number;
                      notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                      notify_msg.message.error.error_stream = j->stream;
                      mCallbackOps->notify(mCallbackOps, &notify_msg);
                      ALOGV("%s: End of reporting error frame#=%d, streamID=%d",
                             __func__, i->frame_number, streamID);
                      // Record the drop so the buffer handler can mark the
                      // eventual buffer with CAMERA3_BUFFER_STATUS_ERROR.
                      PendingFrameDropInfo PendingFrameDrop;
                      PendingFrameDrop.frame_number=i->frame_number;
                      PendingFrameDrop.stream_ID = streamID;
                      // Add the Frame drop info to mPendingFrameDropList
                      mPendingFrameDropList.push_back(PendingFrameDrop);
                  }
                }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        if (i->frame_number < frame_number) {
            // This request's metadata was dropped: synthesize a minimal
            // result with only the timestamp and request id populated.
            CameraMetadata dummyMetadata;
            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                    &i->timestamp, 1);
            dummyMetadata.update(ANDROID_REQUEST_ID,
                    &(i->request_id), 1);
            result.result = dummyMetadata.release();
        } else {
            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }

                //If it is a blob request then send the metadata to the picture channel
                metadata_buffer_t *reproc_meta =
                        (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
                if (reproc_meta == NULL) {
                    // NOTE(review): on this failure path metadata_buf is
                    // neither returned via bufDone nor freed, and result.result
                    // is not released — looks like a leak; confirm.
                    ALOGE("%s: Failed to allocate memory for reproc data.", __func__);
                    goto done_metadata;
                }
                *reproc_meta = *metadata;
                // Picture channel takes ownership of reproc_meta.
                mPictureChannel->queueReprocMetadata(reproc_meta);
            }
            // Return metadata buffer
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the buffers handleBufferWithLock has already cached on this
        // pending request; they are delivered together with the metadata.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            // NOTE(review): operator new[] throws on failure rather than
            // returning NULL, so this check can never trigger (and there is
            // no recovery path after the log).
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer as errored if this stream/frame pair was
                    // recorded in the frame-drop list; consume that entry.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove the buffer from the global pending-buffers map.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        ALOGV("%s: Found buffer %p in pending buffer List "
                              "for frame %d, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    // j->buffer was malloc'd by handleBufferWithLock; release
                    // it once its contents are copied into result_buffers.
                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                    __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // No buffers cached yet for this request: deliver metadata only.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list
        i = mPendingRequestsList.erase(i);
    }

done_metadata:
    // Wake a blocked process_capture_request() once the backend reports no
    // more pending requests.
    if (!pending_requests)
        unblockRequestIfNecessary();

}
1293
1294/*===========================================================================
1295 * FUNCTION   : handleBufferWithLock
1296 *
1297 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1298 *
1299 * PARAMETERS : @buffer: image buffer for the callback
1300 *              @frame_number: frame number of the image buffer
1301 *
1302 * RETURN     :
1303 *
1304 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // The request is no longer pending (its metadata was already
        // delivered), so this buffer goes straight to the framework.
        // Verify all pending requests frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // Flag the buffer as errored if this stream/frame pair was recorded
        // in the frame-drop list; consume the matching entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        ALOGV("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Drop the buffer from the global pending-buffers bookkeeping.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                ALOGV("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer_present) {
            // Reprocess request: the single output buffer completes it, so
            // report the result and retire the pending request immediately.
            camera3_capture_result result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            i = mPendingRequestsList.erase(i);
            mPendingRequest--;
        } else {
            // Metadata has not arrived yet: cache a malloc'd copy of the
            // buffer on the pending request; handleMetadataWithLock sends
            // and frees it when the metadata comes in.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
1388
1389/*===========================================================================
1390 * FUNCTION   : unblockRequestIfNecessary
1391 *
1392 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1393 *              that mMutex is held when this function is called.
1394 *
1395 * PARAMETERS :
1396 *
1397 * RETURN     :
1398 *
1399 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request
   // Caller holds mMutex (see function header above); a spurious signal is
   // harmless because the waiter re-checks its predicate after waking.
   pthread_cond_signal(&mRequestCond);
}
1405
1406/*===========================================================================
1407 * FUNCTION   : registerStreamBuffers
1408 *
1409 * DESCRIPTION: Register buffers for a given stream with the HAL device.
1410 *
1411 * PARAMETERS :
1412 *   @stream_list : streams to be configured
1413 *
1414 * RETURN     :
1415 *
1416 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t * /*buffer_set*/)
{
    //Deprecated
    // Intentionally a no-op: buffers are instead registered per-channel on
    // the first capture request (see the registerBuffer calls in
    // processCaptureRequest).
    return NO_ERROR;
}
1423
1424/*===========================================================================
1425 * FUNCTION   : processCaptureRequest
1426 *
1427 * DESCRIPTION: process a capture request from camera service
1428 *
1429 * PARAMETERS :
1430 *   @request : request from framework to process
1431 *
1432 * RETURN     :
1433 *
1434 *==========================================================================*/
1435int QCamera3HardwareInterface::processCaptureRequest(
1436                    camera3_capture_request_t *request)
1437{
1438    int rc = NO_ERROR;
1439    int32_t request_id;
1440    CameraMetadata meta;
1441
1442    pthread_mutex_lock(&mMutex);
1443
1444    rc = validateCaptureRequest(request);
1445    if (rc != NO_ERROR) {
1446        ALOGE("%s: incoming request is not valid", __func__);
1447        pthread_mutex_unlock(&mMutex);
1448        return rc;
1449    }
1450
1451    meta = request->settings;
1452
1453    // For first capture request, send capture intent, and
1454    // stream on all streams
1455    if (mFirstRequest) {
1456
1457        for (size_t i = 0; i < request->num_output_buffers; i++) {
1458            const camera3_stream_buffer_t& output = request->output_buffers[i];
1459            QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1460            rc = channel->registerBuffer(output.buffer);
1461            if (rc < 0) {
1462                ALOGE("%s: registerBuffer failed",
1463                        __func__);
1464                pthread_mutex_unlock(&mMutex);
1465                return -ENODEV;
1466            }
1467        }
1468
1469        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1470            int32_t hal_version = CAM_HAL_V3;
1471            uint8_t captureIntent =
1472                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1473
1474            memset(mParameters, 0, sizeof(metadata_buffer_t));
1475            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1476            AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1477                sizeof(hal_version), &hal_version);
1478            AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1479                sizeof(captureIntent), &captureIntent);
1480            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1481                mParameters);
1482        }
1483
1484        ALOGD("%s: Start META Channel", __func__);
1485        mMetadataChannel->start();
1486
1487        if (mSupportChannel)
1488            mSupportChannel->start();
1489
1490        //First initialize all streams
1491        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1492            it != mStreamInfo.end(); it++) {
1493            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1494            rc = channel->initialize();
1495            if (NO_ERROR != rc) {
1496                ALOGE("%s : Channel initialization failed %d", __func__, rc);
1497                if (mSupportChannel)
1498                    mSupportChannel->stop();
1499                mMetadataChannel->stop();
1500                pthread_mutex_unlock(&mMutex);
1501                return rc;
1502            }
1503        }
1504        //Then start them.
1505        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1506            it != mStreamInfo.end(); it++) {
1507            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1508            ALOGD("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
1509            channel->start();
1510        }
1511    }
1512
1513    uint32_t frameNumber = request->frame_number;
1514    cam_stream_ID_t streamID;
1515
1516    if (meta.exists(ANDROID_REQUEST_ID)) {
1517        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1518        mCurrentRequestId = request_id;
1519        ALOGV("%s: Received request with id: %d",__func__, request_id);
1520    } else if (mFirstRequest || mCurrentRequestId == -1){
1521        ALOGE("%s: Unable to find request id field, \
1522                & no previous id available", __func__);
1523        return NAME_NOT_FOUND;
1524    } else {
1525        ALOGV("%s: Re-using old request id", __func__);
1526        request_id = mCurrentRequestId;
1527    }
1528
1529    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1530                                    __func__, __LINE__,
1531                                    request->num_output_buffers,
1532                                    request->input_buffer,
1533                                    frameNumber);
1534    // Acquire all request buffers first
1535    streamID.num_streams = 0;
1536    int blob_request = 0;
1537    for (size_t i = 0; i < request->num_output_buffers; i++) {
1538        const camera3_stream_buffer_t& output = request->output_buffers[i];
1539        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1540        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1541
1542        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1543            //Call function to store local copy of jpeg data for encode params.
1544            blob_request = 1;
1545        }
1546
1547        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1548        if (rc != OK) {
1549            ALOGE("%s: fence wait failed %d", __func__, rc);
1550            pthread_mutex_unlock(&mMutex);
1551            return rc;
1552        }
1553
1554        streamID.streamID[streamID.num_streams] =
1555            channel->getStreamID(channel->getStreamTypeMask());
1556        streamID.num_streams++;
1557    }
1558
1559    if(request->input_buffer == NULL) {
1560       rc = setFrameParameters(request, streamID);
1561        if (rc < 0) {
1562            ALOGE("%s: fail to set frame parameters", __func__);
1563            pthread_mutex_unlock(&mMutex);
1564            return rc;
1565        }
1566    }
1567
1568    /* Update pending request list and pending buffers map */
1569    PendingRequestInfo pendingRequest;
1570    pendingRequest.frame_number = frameNumber;
1571    pendingRequest.num_buffers = request->num_output_buffers;
1572    pendingRequest.request_id = request_id;
1573    pendingRequest.blob_request = blob_request;
1574    pendingRequest.bNotified = 0;
1575    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1576    extractJpegMetadata(pendingRequest.jpegMetadata, request);
1577
1578    for (size_t i = 0; i < request->num_output_buffers; i++) {
1579        RequestedBufferInfo requestedBuf;
1580        requestedBuf.stream = request->output_buffers[i].stream;
1581        requestedBuf.buffer = NULL;
1582        pendingRequest.buffers.push_back(requestedBuf);
1583
1584        // Add to buffer handle the pending buffers list
1585        PendingBufferInfo bufferInfo;
1586        bufferInfo.frame_number = frameNumber;
1587        bufferInfo.buffer = request->output_buffers[i].buffer;
1588        bufferInfo.stream = request->output_buffers[i].stream;
1589        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
1590        mPendingBuffersMap.num_buffers++;
1591        ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1592          __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
1593          bufferInfo.stream->format);
1594    }
1595    ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1596          __func__, mPendingBuffersMap.num_buffers);
1597
1598    mPendingRequestsList.push_back(pendingRequest);
1599
1600    // Notify metadata channel we receive a request
1601    mMetadataChannel->request(NULL, frameNumber);
1602
1603    // Call request on other streams
1604    for (size_t i = 0; i < request->num_output_buffers; i++) {
1605        const camera3_stream_buffer_t& output = request->output_buffers[i];
1606        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1607        mm_camera_buf_def_t *pInputBuffer = NULL;
1608
1609        if (channel == NULL) {
1610            ALOGE("%s: invalid channel pointer for stream", __func__);
1611            continue;
1612        }
1613
1614        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1615            QCamera3RegularChannel* inputChannel = NULL;
1616            if(request->input_buffer != NULL){
1617
1618                //Try to get the internal format
1619                inputChannel = (QCamera3RegularChannel*)
1620                    request->input_buffer->stream->priv;
1621                if(inputChannel == NULL ){
1622                    ALOGE("%s: failed to get input channel handle", __func__);
1623                } else {
1624                    pInputBuffer =
1625                        inputChannel->getInternalFormatBuffer(
1626                                request->input_buffer->buffer);
1627                    ALOGD("%s: Input buffer dump",__func__);
1628                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1629                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1630                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1631                    ALOGD("Handle:%p", request->input_buffer->buffer);
1632                }
1633                rc = channel->request(output.buffer, frameNumber,
1634                            pInputBuffer, mParameters);
1635                if (rc < 0) {
1636                    ALOGE("%s: Fail to request on picture channel", __func__);
1637                    pthread_mutex_unlock(&mMutex);
1638                    return rc;
1639                }
1640
1641                rc = setReprocParameters(request);
1642                if (rc < 0) {
1643                    ALOGE("%s: fail to set reproc parameters", __func__);
1644                    pthread_mutex_unlock(&mMutex);
1645                    return rc;
1646                }
1647            } else
1648                rc = channel->request(output.buffer, frameNumber,
1649                            NULL, mParameters);
1650        } else {
1651            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1652                __LINE__, output.buffer, frameNumber);
1653           rc = channel->request(output.buffer, frameNumber);
1654        }
1655        if (rc < 0)
1656            ALOGE("%s: request failed", __func__);
1657    }
1658
1659    mFirstRequest = false;
1660    // Added a timed condition wait
1661    struct timespec ts;
1662    uint8_t isValidTimeout = 1;
1663    rc = clock_gettime(CLOCK_REALTIME, &ts);
1664    if (rc < 0) {
1665        isValidTimeout = 0;
1666        ALOGE("%s: Error reading the real time clock!!", __func__);
1667    }
1668    else {
1669        // Make timeout as 5 sec for request to be honored
1670        ts.tv_sec += 5;
1671    }
1672    //Block on conditional variable
1673
1674    mPendingRequest++;
1675    while (mPendingRequest >= kMaxInFlight) {
1676        if (!isValidTimeout) {
1677            ALOGV("%s: Blocking on conditional wait", __func__);
1678            pthread_cond_wait(&mRequestCond, &mMutex);
1679        }
1680        else {
1681            ALOGV("%s: Blocking on timed conditional wait", __func__);
1682            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1683            if (rc == ETIMEDOUT) {
1684                rc = -ENODEV;
1685                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1686                break;
1687            }
1688        }
1689        ALOGV("%s: Unblocked", __func__);
1690    }
1691    pthread_mutex_unlock(&mMutex);
1692
1693    return rc;
1694}
1695
1696/*===========================================================================
1697 * FUNCTION   : dump
1698 *
1699 * DESCRIPTION:
1700 *
1701 * PARAMETERS :
1702 *
1703 *
1704 * RETURN     :
1705 *==========================================================================*/
1706void QCamera3HardwareInterface::dump(int /*fd*/)
1707{
1708    /*Enable lock when we implement this function*/
1709    /*
1710    pthread_mutex_lock(&mMutex);
1711
1712    pthread_mutex_unlock(&mMutex);
1713    */
1714    return;
1715}
1716
1717/*===========================================================================
1718 * FUNCTION   : flush
1719 *
1720 * DESCRIPTION:
1721 *
1722 * PARAMETERS :
1723 *
1724 *
1725 * RETURN     :
1726 *==========================================================================*/
1727int QCamera3HardwareInterface::flush()
1728{
1729
1730    unsigned int frameNum = 0;
1731    camera3_notify_msg_t notify_msg;
1732    camera3_capture_result_t result;
1733    camera3_stream_buffer_t pStream_Buf;
1734
1735    ALOGV("%s: Unblocking Process Capture Request", __func__);
1736
1737    // Stop the Streams/Channels
1738    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1739        it != mStreamInfo.end(); it++) {
1740        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1741        channel->stop();
1742        (*it)->status = INVALID;
1743    }
1744
1745    if (mSupportChannel) {
1746        mSupportChannel->stop();
1747    }
1748    if (mMetadataChannel) {
1749        /* If content of mStreamInfo is not 0, there is metadata stream */
1750        mMetadataChannel->stop();
1751    }
1752
1753    // Mutex Lock
1754    pthread_mutex_lock(&mMutex);
1755
1756    // Unblock process_capture_request
1757    mPendingRequest = 0;
1758    pthread_cond_signal(&mRequestCond);
1759
1760    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1761    frameNum = i->frame_number;
1762    ALOGV("%s: Latest frame num on  mPendingRequestsList = %d",
1763      __func__, frameNum);
1764
1765    // Go through the pending buffers and send buffer errors
1766    for (List<PendingBufferInfo>::iterator k =
1767         mPendingBuffersMap.mPendingBufferList.begin();
1768         k != mPendingBuffersMap.mPendingBufferList.end();  ) {
1769         ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1770          __func__, k->frame_number, k->buffer, k->stream,
1771          k->stream->format);
1772
1773        if (k->frame_number < frameNum) {
1774            // Send Error notify to frameworks for each buffer for which
1775            // metadata buffer is already sent
1776            ALOGV("%s: Sending ERROR BUFFER for frame %d, buffer %p",
1777              __func__, k->frame_number, k->buffer);
1778
1779            notify_msg.type = CAMERA3_MSG_ERROR;
1780            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
1781            notify_msg.message.error.error_stream = k->stream;
1782            notify_msg.message.error.frame_number = k->frame_number;
1783            mCallbackOps->notify(mCallbackOps, &notify_msg);
1784            ALOGV("%s: notify frame_number = %d", __func__,
1785                    i->frame_number);
1786
1787            pStream_Buf.acquire_fence = -1;
1788            pStream_Buf.release_fence = -1;
1789            pStream_Buf.buffer = k->buffer;
1790            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1791            pStream_Buf.stream = k->stream;
1792
1793            result.result = NULL;
1794            result.frame_number = k->frame_number;
1795            result.num_output_buffers = 1;
1796            result.output_buffers = &pStream_Buf ;
1797            mCallbackOps->process_capture_result(mCallbackOps, &result);
1798
1799            mPendingBuffersMap.num_buffers--;
1800            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1801        }
1802        else {
1803          k++;
1804        }
1805    }
1806
1807    ALOGV("%s:Sending ERROR REQUEST for all pending requests", __func__);
1808
1809    // Go through the pending requests info and send error request to framework
1810    for (i = mPendingRequestsList.begin(); i != mPendingRequestsList.end(); ) {
1811        int numBuffers = 0;
1812        ALOGV("%s:Sending ERROR REQUEST for frame %d",
1813              __func__, i->frame_number);
1814
1815        // Send shutter notify to frameworks
1816        notify_msg.type = CAMERA3_MSG_ERROR;
1817        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
1818        notify_msg.message.error.error_stream = NULL;
1819        notify_msg.message.error.frame_number = i->frame_number;
1820        mCallbackOps->notify(mCallbackOps, &notify_msg);
1821
1822        result.frame_number = i->frame_number;
1823        result.num_output_buffers = 0;
1824        result.output_buffers = NULL;
1825        numBuffers = 0;
1826
1827        for (List<PendingBufferInfo>::iterator k =
1828             mPendingBuffersMap.mPendingBufferList.begin();
1829             k != mPendingBuffersMap.mPendingBufferList.end(); ) {
1830          if (k->frame_number == i->frame_number) {
1831            ALOGV("%s: Sending Error for frame = %d, buffer = %p,"
1832                   " stream = %p, stream format = %d",__func__,
1833                   k->frame_number, k->buffer, k->stream, k->stream->format);
1834
1835            pStream_Buf.acquire_fence = -1;
1836            pStream_Buf.release_fence = -1;
1837            pStream_Buf.buffer = k->buffer;
1838            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1839            pStream_Buf.stream = k->stream;
1840
1841            result.num_output_buffers = 1;
1842            result.output_buffers = &pStream_Buf;
1843            result.result = NULL;
1844            result.frame_number = i->frame_number;
1845
1846            mCallbackOps->process_capture_result(mCallbackOps, &result);
1847            mPendingBuffersMap.num_buffers--;
1848            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1849            numBuffers++;
1850          }
1851          else {
1852            k++;
1853          }
1854        }
1855        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1856              __func__, mPendingBuffersMap.num_buffers);
1857
1858        i = mPendingRequestsList.erase(i);
1859    }
1860
1861    /* Reset pending buffer list and requests list */
1862    mPendingRequestsList.clear();
1863    /* Reset pending frame Drop list and requests list */
1864    mPendingFrameDropList.clear();
1865
1866    mPendingBuffersMap.num_buffers = 0;
1867    mPendingBuffersMap.mPendingBufferList.clear();
1868    ALOGV("%s: Cleared all the pending buffers ", __func__);
1869
1870    mFirstRequest = true;
1871    pthread_mutex_unlock(&mMutex);
1872    return 0;
1873}
1874
1875/*===========================================================================
1876 * FUNCTION   : captureResultCb
1877 *
1878 * DESCRIPTION: Callback handler for all capture result
1879 *              (streams, as well as metadata)
1880 *
1881 * PARAMETERS :
1882 *   @metadata : metadata information
1883 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1884 *               NULL if metadata.
1885 *
1886 * RETURN     : NONE
1887 *==========================================================================*/
1888void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1889                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1890{
1891    pthread_mutex_lock(&mMutex);
1892
1893    /* Assume flush() is called before any reprocessing. Send
1894     * notify and result immediately upon receipt of any callback*/
1895    if (mLoopBackResult) {
1896        /* Send notify */
1897        camera3_notify_msg_t notify_msg;
1898        notify_msg.type = CAMERA3_MSG_SHUTTER;
1899        notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
1900        notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
1901        mCallbackOps->notify(mCallbackOps, &notify_msg);
1902
1903        /* Send capture result */
1904        mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
1905        free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
1906        free(mLoopBackResult);
1907        mLoopBackResult = NULL;
1908    }
1909
1910    if (metadata_buf)
1911        handleMetadataWithLock(metadata_buf);
1912    else
1913        handleBufferWithLock(buffer, frame_number);
1914
1915    pthread_mutex_unlock(&mMutex);
1916    return;
1917}
1918
1919/*===========================================================================
1920 * FUNCTION   : translateFromHalMetadata
1921 *
1922 * DESCRIPTION:
1923 *
1924 * PARAMETERS :
1925 *   @metadata : metadata information from callback
1926 *
1927 * RETURN     : camera_metadata_t*
1928 *              metadata in a format specified by fwk
1929 *==========================================================================*/
1930camera_metadata_t*
1931QCamera3HardwareInterface::translateFromHalMetadata(
1932                                 metadata_buffer_t *metadata,
1933                                 nsecs_t timestamp,
1934                                 int32_t request_id,
1935                                 const CameraMetadata& jpegMetadata)
1936{
1937    CameraMetadata camMetadata;
1938    camera_metadata_t* resultMetadata;
1939
1940    if (jpegMetadata.entryCount())
1941        camMetadata.append(jpegMetadata);
1942
1943    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1944    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1945
1946    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1947    uint8_t next_entry;
1948    while (curr_entry != CAM_INTF_PARM_MAX) {
1949       switch (curr_entry) {
1950         case CAM_INTF_META_FRAME_NUMBER:{
1951             int64_t frame_number = *(uint32_t *) POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1952             camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
1953             break;
1954         }
1955         case CAM_INTF_META_FACE_DETECTION:{
1956             cam_face_detection_data_t *faceDetectionInfo =
1957                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1958             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1959             int32_t faceIds[MAX_ROI];
1960             uint8_t faceScores[MAX_ROI];
1961             int32_t faceRectangles[MAX_ROI * 4];
1962             int32_t faceLandmarks[MAX_ROI * 6];
1963             int j = 0, k = 0;
1964             for (int i = 0; i < numFaces; i++) {
1965                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1966                 faceScores[i] = faceDetectionInfo->faces[i].score;
1967                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1968                         faceRectangles+j, -1);
1969                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1970                 j+= 4;
1971                 k+= 6;
1972             }
1973
1974             if (numFaces <= 0) {
1975                memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
1976                memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
1977                memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
1978                memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
1979             }
1980
1981             camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1982             camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1983             camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1984               faceRectangles, numFaces*4);
1985             camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1986               faceLandmarks, numFaces*6);
1987
1988            break;
1989            }
1990         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1991             uint8_t  *color_correct_mode =
1992                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1993             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1994             break;
1995          }
1996
1997         // 3A state is sent in urgent partial result (uses quirk)
1998         case CAM_INTF_META_AEC_PRECAPTURE_ID:
1999         case CAM_INTF_META_AEC_ROI:
2000         case CAM_INTF_META_AEC_STATE:
2001         case CAM_INTF_PARM_FOCUS_MODE:
2002         case CAM_INTF_META_AF_ROI:
2003         case CAM_INTF_META_AF_STATE:
2004         case CAM_INTF_META_AF_TRIGGER_ID:
2005         case CAM_INTF_PARM_WHITE_BALANCE:
2006         case CAM_INTF_META_AWB_REGIONS:
2007         case CAM_INTF_META_AWB_STATE:
2008         case CAM_INTF_META_MODE: {
2009           ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
2010           break;
2011         }
2012
2013          case CAM_INTF_META_EDGE_MODE: {
2014             cam_edge_application_t  *edgeApplication =
2015                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
2016             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
2017             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
2018             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
2019             break;
2020          }
2021          case CAM_INTF_META_FLASH_POWER: {
2022             uint8_t  *flashPower =
2023                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
2024             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
2025             break;
2026          }
2027          case CAM_INTF_META_FLASH_FIRING_TIME: {
2028             int64_t  *flashFiringTime =
2029                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
2030             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
2031             break;
2032          }
2033          case CAM_INTF_META_FLASH_STATE: {
2034             uint8_t  flashState =
2035                *((uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata));
2036             if (!gCamCapability[mCameraId]->flash_available) {
2037                 flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2038             }
2039             camMetadata.update(ANDROID_FLASH_STATE, &flashState, 1);
2040             break;
2041          }
2042          case CAM_INTF_META_FLASH_MODE:{
2043             uint8_t flashMode = *((uint8_t*)
2044                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata));
2045             uint8_t fwk_flashMode = lookupFwkName(FLASH_MODES_MAP,
2046                                          sizeof(FLASH_MODES_MAP),
2047                                          flashMode);
2048             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
2049             break;
2050          }
2051          case CAM_INTF_META_HOTPIXEL_MODE: {
2052              uint8_t  *hotPixelMode =
2053                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
2054              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
2055              break;
2056          }
2057          case CAM_INTF_META_LENS_APERTURE:{
2058             float  *lensAperture =
2059                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
2060             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
2061             break;
2062          }
2063          case CAM_INTF_META_LENS_FILTERDENSITY: {
2064             float  *filterDensity =
2065                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
2066             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
2067             break;
2068          }
2069          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
2070             float  *focalLength =
2071                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2072             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
2073             break;
2074          }
2075          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
2076             float  *focusDistance =
2077                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
2078             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
2079             break;
2080          }
2081          case CAM_INTF_META_LENS_FOCUS_RANGE: {
2082             float  *focusRange =
2083                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
2084             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
2085             break;
2086          }
2087          case CAM_INTF_META_LENS_STATE: {
2088             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
2089             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
2090             break;
2091          }
2092          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
2093             uint8_t  *opticalStab =
2094                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
2095             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
2096             break;
2097          }
2098          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
2099             uint8_t  *noiseRedMode =
2100                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
2101             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
2102             break;
2103          }
2104          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
2105             uint8_t  *noiseRedStrength =
2106                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
2107             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
2108             break;
2109          }
2110          case CAM_INTF_META_SCALER_CROP_REGION: {
2111             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
2112             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
2113             int32_t scalerCropRegion[4];
2114             scalerCropRegion[0] = hScalerCropRegion->left;
2115             scalerCropRegion[1] = hScalerCropRegion->top;
2116             scalerCropRegion[2] = hScalerCropRegion->width;
2117             scalerCropRegion[3] = hScalerCropRegion->height;
2118             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
2119             break;
2120          }
2121          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
2122             int64_t  *sensorExpTime =
2123                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2124             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
2125             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
2126             break;
2127          }
2128          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
2129             int64_t  *sensorFameDuration =
2130                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
2131             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
2132             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
2133             break;
2134          }
2135          case CAM_INTF_META_SENSOR_SENSITIVITY:{
2136             int32_t  *sensorSensitivity =
2137                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
2138             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
2139             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
2140             break;
2141          }
2142          case CAM_INTF_META_SHADING_MODE: {
2143             uint8_t  *shadingMode =
2144                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
2145             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
2146             break;
2147          }
2148          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
2149             uint8_t  *faceDetectMode =
2150                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
2151             uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
2152                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
2153                                                        *faceDetectMode);
2154             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
2155             break;
2156          }
2157          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
2158             uint8_t  *histogramMode =
2159                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
2160             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
2161             break;
2162          }
2163          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
2164               uint8_t  *sharpnessMapMode =
2165                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
2166               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
2167                                  sharpnessMapMode, 1);
2168               break;
2169           }
2170          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
2171               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
2172               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
2173               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
2174                                  (int32_t*)sharpnessMap->sharpness,
2175                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
2176               break;
2177          }
2178          case CAM_INTF_META_LENS_SHADING_MAP: {
2179               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2180               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2181               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2182               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
2183               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2184                                  (float*)lensShadingMap->lens_shading,
2185                                  4*map_width*map_height);
2186               break;
2187          }
2188
2189          case CAM_INTF_META_TONEMAP_MODE: {
2190             uint8_t  *toneMapMode =
2191                (uint8_t *)POINTER_OF(CAM_INTF_META_TONEMAP_MODE, metadata);
2192             camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
2193             break;
2194          }
2195
2196          case CAM_INTF_META_TONEMAP_CURVES:{
2197             //Populate CAM_INTF_META_TONEMAP_CURVES
2198             /* ch0 = G, ch 1 = B, ch 2 = R*/
2199             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2200             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
2201             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
2202                                (float*)tonemap->curves[0].tonemap_points,
2203                                tonemap->tonemap_points_cnt * 2);
2204
2205             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
2206                                (float*)tonemap->curves[1].tonemap_points,
2207                                tonemap->tonemap_points_cnt * 2);
2208
2209             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
2210                                (float*)tonemap->curves[2].tonemap_points,
2211                                tonemap->tonemap_points_cnt * 2);
2212             break;
2213          }
2214
2215          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
2216             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
2217             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
2218             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
2219             break;
2220          }
2221          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
2222              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
2223              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
2224              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
2225                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
2226              break;
2227          }
2228
2229          /* DNG file realted metadata */
2230          case CAM_INTF_META_PROFILE_TONE_CURVE: {
2231             cam_profile_tone_curve *toneCurve = (cam_profile_tone_curve *)
2232             POINTER_OF(CAM_INTF_META_PROFILE_TONE_CURVE, metadata);
2233             camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
2234                                (float*)toneCurve->curve.tonemap_points,
2235                                toneCurve->tonemap_points_cnt * 2);
2236             break;
2237          }
2238
2239          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
2240             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
2241             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
2242             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
2243                       predColorCorrectionGains->gains, 4);
2244             break;
2245          }
2246          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
2247             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
2248                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2249             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2250                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2251             break;
2252
2253          }
2254
2255          case CAM_INTF_META_OTP_WB_GRGB:{
2256             float *otpWbGrGb = (float*) POINTER_OF(CAM_INTF_META_OTP_WB_GRGB, metadata);
2257             camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
2258             break;
2259          }
2260
2261          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
2262             uint8_t *blackLevelLock = (uint8_t*)
2263               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2264             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2265             break;
2266          }
2267          case CAM_INTF_META_SCENE_FLICKER:{
2268             uint8_t *sceneFlicker = (uint8_t*)
2269             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2270             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2271             break;
2272          }
2273          case CAM_INTF_PARM_LED_MODE:
2274             break;
2275          case CAM_INTF_PARM_EFFECT: {
2276             uint8_t *effectMode = (uint8_t*)
2277                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2278             uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
2279                                                    sizeof(EFFECT_MODES_MAP),
2280                                                    *effectMode);
2281             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2282             break;
2283          }
2284          case CAM_INTF_META_TEST_PATTERN_DATA: {
2285             cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
2286                 POINTER_OF(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
2287             int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
2288                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
2289                     testPatternData->mode);
2290             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
2291                     &fwk_testPatternMode, 1);
2292             break;
2293          }
2294          case CAM_INTF_META_JPEG_GPS_COORDINATES: {
2295              double *gps_coords = (double *)POINTER_OF(
2296                      CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
2297              camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
2298              break;
2299          }
2300          case CAM_INTF_META_JPEG_GPS_PROC_METHODS: {
2301              char *gps_methods = (char *)POINTER_OF(
2302                      CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
2303              String8 str(gps_methods);
2304              camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
2305              break;
2306          }
2307          case CAM_INTF_META_JPEG_GPS_TIMESTAMP: {
2308              int64_t *gps_timestamp = (int64_t *)POINTER_OF(
2309                      CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
2310              camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
2311              break;
2312          }
2313          case CAM_INTF_META_JPEG_ORIENTATION: {
2314              int32_t *jpeg_orientation = (int32_t *)POINTER_OF(
2315                      CAM_INTF_META_JPEG_ORIENTATION, metadata);
2316              camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
2317              break;
2318          }
2319          case CAM_INTF_META_JPEG_QUALITY: {
2320              uint8_t *jpeg_quality = (uint8_t *)POINTER_OF(
2321                      CAM_INTF_META_JPEG_QUALITY, metadata);
2322              camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
2323              break;
2324          }
2325          case CAM_INTF_META_JPEG_THUMB_QUALITY: {
2326              uint8_t *thumb_quality = (uint8_t *)POINTER_OF(
2327                      CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
2328              camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
2329              break;
2330          }
2331
2332          case CAM_INTF_META_JPEG_THUMB_SIZE: {
2333              cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF(
2334                      CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
2335              camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, (int32_t *)thumb_size, 2);
2336              break;
2337          }
2338
2339             break;
2340          case CAM_INTF_META_PRIVATE_DATA: {
2341             uint8_t *privateData = (uint8_t *)
2342                 POINTER_OF(CAM_INTF_META_PRIVATE_DATA, metadata);
2343             camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
2344                 privateData, MAX_METADATA_PAYLOAD_SIZE);
2345             break;
2346          }
2347
2348          case CAM_INTF_META_NEUTRAL_COL_POINT:{
2349             cam_neutral_col_point_t *neuColPoint = (cam_neutral_col_point_t*)
2350                 POINTER_OF(CAM_INTF_META_NEUTRAL_COL_POINT, metadata);
2351             camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
2352                     (camera_metadata_rational_t*)neuColPoint->neutral_col_point, 3);
2353             break;
2354          }
2355
2356          default:
2357             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2358                   __func__, curr_entry);
2359             break;
2360       }
2361       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2362       curr_entry = next_entry;
2363    }
2364
2365    uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
2366    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
2367
2368    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
2369    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
2370
2371    int32_t hotPixelMap[2];
2372    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
2373
2374    resultMetadata = camMetadata.release();
2375    return resultMetadata;
2376}
2377
2378/*===========================================================================
2379 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
2380 *
2381 * DESCRIPTION:
2382 *
2383 * PARAMETERS :
2384 *   @metadata : metadata information from callback
2385 *
2386 * RETURN     : camera_metadata_t*
2387 *              metadata in a format specified by fwk
2388 *==========================================================================*/
2389camera_metadata_t*
2390QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
2391                                (metadata_buffer_t *metadata) {
2392
2393    CameraMetadata camMetadata;
2394    camera_metadata_t* resultMetadata;
2395
2396    uint8_t partial_result_tag = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
2397    camMetadata.update(ANDROID_QUIRKS_PARTIAL_RESULT, &partial_result_tag, 1);
2398
2399    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
2400    uint8_t next_entry;
2401    while (curr_entry != CAM_INTF_PARM_MAX) {
2402      switch (curr_entry) {
2403        case CAM_INTF_META_AEC_PRECAPTURE_ID: {
2404            int32_t  *ae_precapture_id =
2405              (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
2406            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
2407                                          ae_precapture_id, 1);
2408            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID", __func__);
2409          break;
2410        }
2411        case CAM_INTF_META_AEC_ROI: {
2412            cam_area_t  *hAeRegions =
2413                (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
2414            int32_t aeRegions[5];
2415            convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
2416            camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
2417            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_REGIONS", __func__);
2418            break;
2419        }
2420        case CAM_INTF_META_AEC_STATE:{
2421            uint8_t *ae_state =
2422                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
2423            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
2424            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
2425            break;
2426        }
2427        case CAM_INTF_PARM_FOCUS_MODE:{
2428            uint8_t  *focusMode =
2429                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
2430            uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2431               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
2432            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
2433            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
2434            break;
2435        }
2436        case CAM_INTF_META_AF_ROI:{
2437            /*af regions*/
2438            cam_area_t  *hAfRegions =
2439                (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
2440            int32_t afRegions[5];
2441            convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
2442            camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
2443            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_REGIONS", __func__);
2444            break;
2445        }
2446        case CAM_INTF_META_AF_STATE: {
2447            uint8_t  *afState =
2448               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
2449            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
2450            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
2451            break;
2452        }
2453        case CAM_INTF_META_AF_TRIGGER_ID: {
2454            int32_t  *afTriggerId =
2455                 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
2456            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
2457            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID", __func__);
2458            break;
2459        }
2460        case CAM_INTF_PARM_WHITE_BALANCE: {
2461           uint8_t  *whiteBalance =
2462                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
2463             uint8_t fwkWhiteBalanceMode =
2464                    (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2465                    sizeof(WHITE_BALANCE_MODES_MAP)/
2466                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
2467             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
2468                 &fwkWhiteBalanceMode, 1);
2469            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
2470             break;
2471        }
2472        case CAM_INTF_META_AWB_REGIONS: {
2473           /*awb regions*/
2474           cam_area_t  *hAwbRegions =
2475               (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
2476           int32_t awbRegions[5];
2477           convertToRegions(hAwbRegions->rect, awbRegions,hAwbRegions->weight);
2478           camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
2479           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_REGIONS", __func__);
2480           break;
2481        }
2482        case CAM_INTF_META_AWB_STATE: {
2483           uint8_t  *whiteBalanceState =
2484              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
2485           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
2486           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
2487           break;
2488        }
2489        case CAM_INTF_META_MODE: {
2490            uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
2491            camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
2492            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_MODE", __func__);
2493            break;
2494        }
2495        default:
2496            ALOGV("%s: Normal Metadata %d, do not process",
2497              __func__, curr_entry);
2498       }
2499       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2500       curr_entry = next_entry;
2501    }
2502    resultMetadata = camMetadata.release();
2503    return resultMetadata;
2504}
2505
2506/*===========================================================================
2507 * FUNCTION   : dumpMetadataToFile
2508 *
2509 * DESCRIPTION: Dumps tuning metadata to file system
2510 *
2511 * PARAMETERS :
2512 *   @meta           : tuning metadata
2513 *   @dumpFrameCount : current dump frame count
2514 *   @enabled        : Enable mask
2515 *
2516 *==========================================================================*/
2517void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
2518                                                   uint32_t &dumpFrameCount,
2519                                                   int32_t enabled,
2520                                                   const char *type,
2521                                                   uint32_t frameNumber)
2522{
2523    uint32_t frm_num = 0;
2524
2525    //Some sanity checks
2526    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
2527        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
2528              __func__,
2529              meta.tuning_sensor_data_size,
2530              TUNING_SENSOR_DATA_MAX);
2531        return;
2532    }
2533
2534    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
2535        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
2536              __func__,
2537              meta.tuning_vfe_data_size,
2538              TUNING_VFE_DATA_MAX);
2539        return;
2540    }
2541
2542    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
2543        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
2544              __func__,
2545              meta.tuning_cpp_data_size,
2546              TUNING_CPP_DATA_MAX);
2547        return;
2548    }
2549
2550    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
2551        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
2552              __func__,
2553              meta.tuning_cac_data_size,
2554              TUNING_CAC_DATA_MAX);
2555        return;
2556    }
2557    //
2558
2559    if(enabled){
2560        frm_num = ((enabled & 0xffff0000) >> 16);
2561        if(frm_num == 0) {
2562            frm_num = 10; //default 10 frames
2563        }
2564        if(frm_num > 256) {
2565            frm_num = 256; //256 buffers cycle around
2566        }
2567        if((frm_num == 256) && (dumpFrameCount >= frm_num)) {
2568            // reset frame count if cycling
2569            dumpFrameCount = 0;
2570        }
2571        ALOGV("DumpFrmCnt = %d, frm_num = %d",dumpFrameCount, frm_num);
2572        if (dumpFrameCount < frm_num) {
2573            char timeBuf[FILENAME_MAX];
2574            char buf[FILENAME_MAX];
2575            memset(buf, 0, sizeof(buf));
2576            memset(timeBuf, 0, sizeof(timeBuf));
2577            time_t current_time;
2578            struct tm * timeinfo;
2579            time (&current_time);
2580            timeinfo = localtime (&current_time);
2581            strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
2582            String8 filePath(timeBuf);
2583            snprintf(buf,
2584                     sizeof(buf),
2585                     "%d_HAL_META_%s_%d.bin",
2586                     dumpFrameCount,
2587                     type,
2588                     frameNumber);
2589            filePath.append(buf);
2590            int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2591            if (file_fd > 0) {
2592                int written_len = 0;
2593                meta.tuning_data_version = TUNING_DATA_VERSION;
2594                void *data = (void *)((uint8_t *)&meta.tuning_data_version);
2595                written_len += write(file_fd, data, sizeof(uint32_t));
2596                data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
2597                ALOGV("tuning_sensor_data_size %d",(int)(*(int *)data));
2598                written_len += write(file_fd, data, sizeof(uint32_t));
2599                data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
2600                ALOGV("tuning_vfe_data_size %d",(int)(*(int *)data));
2601                written_len += write(file_fd, data, sizeof(uint32_t));
2602                data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
2603                ALOGV("tuning_cpp_data_size %d",(int)(*(int *)data));
2604                written_len += write(file_fd, data, sizeof(uint32_t));
2605                data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
2606                ALOGV("tuning_cac_data_size %d",(int)(*(int *)data));
2607                written_len += write(file_fd, data, sizeof(uint32_t));
2608                int total_size = meta.tuning_sensor_data_size;
2609                data = (void *)((uint8_t *)&meta.data);
2610                written_len += write(file_fd, data, total_size);
2611                total_size = meta.tuning_vfe_data_size;
2612                data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
2613                written_len += write(file_fd, data, total_size);
2614                total_size = meta.tuning_cpp_data_size;
2615                data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
2616                written_len += write(file_fd, data, total_size);
2617                total_size = meta.tuning_cac_data_size;
2618                data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
2619                written_len += write(file_fd, data, total_size);
2620                close(file_fd);
2621            }else {
2622                ALOGE("%s: fail t open file for image dumping", __func__);
2623            }
2624            dumpFrameCount++;
2625        }
2626    }
2627}
2628
2629/*===========================================================================
2630 * FUNCTION   : cleanAndSortStreamInfo
2631 *
2632 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
2633 *              and sort them such that raw stream is at the end of the list
2634 *              This is a workaround for camera daemon constraint.
2635 *
2636 * PARAMETERS : None
2637 *
2638 *==========================================================================*/
2639void QCamera3HardwareInterface::cleanAndSortStreamInfo()
2640{
2641    List<stream_info_t *> newStreamInfo;
2642
2643    /*clean up invalid streams*/
2644    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2645            it != mStreamInfo.end();) {
2646        if(((*it)->status) == INVALID){
2647            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
2648            delete channel;
2649            free(*it);
2650            it = mStreamInfo.erase(it);
2651        } else {
2652            it++;
2653        }
2654    }
2655
2656    // Move preview/video/callback/snapshot streams into newList
2657    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2658            it != mStreamInfo.end();) {
2659        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
2660                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
2661            newStreamInfo.push_back(*it);
2662            it = mStreamInfo.erase(it);
2663        } else
2664            it++;
2665    }
2666    // Move raw streams into newList
2667    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2668            it != mStreamInfo.end();) {
2669        newStreamInfo.push_back(*it);
2670        it = mStreamInfo.erase(it);
2671    }
2672
2673    mStreamInfo = newStreamInfo;
2674}
2675
2676/*===========================================================================
2677 * FUNCTION   : extractJpegMetadata
2678 *
2679 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
2680 *              JPEG metadata is cached in HAL, and return as part of capture
2681 *              result when metadata is returned from camera daemon.
2682 *
2683 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
2684 *              @request:      capture request
2685 *
2686 *==========================================================================*/
2687void QCamera3HardwareInterface::extractJpegMetadata(
2688        CameraMetadata& jpegMetadata,
2689        const camera3_capture_request_t *request)
2690{
2691    CameraMetadata frame_settings;
2692    frame_settings = request->settings;
2693
2694    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
2695        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
2696                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
2697                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
2698
2699    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
2700        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
2701                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
2702                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
2703
2704    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
2705        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
2706                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
2707                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
2708
2709    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
2710        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
2711                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
2712                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
2713
2714    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
2715        jpegMetadata.update(ANDROID_JPEG_QUALITY,
2716                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
2717                frame_settings.find(ANDROID_JPEG_QUALITY).count);
2718
2719    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
2720        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
2721                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
2722                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
2723
2724    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE))
2725        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
2726                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32,
2727                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
2728}
2729
2730/*===========================================================================
2731 * FUNCTION   : convertToRegions
2732 *
2733 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2734 *
2735 * PARAMETERS :
2736 *   @rect   : cam_rect_t struct to convert
2737 *   @region : int32_t destination array
2738 *   @weight : if we are converting from cam_area_t, weight is valid
2739 *             else weight = -1
2740 *
2741 *==========================================================================*/
2742void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2743    region[0] = rect.left;
2744    region[1] = rect.top;
2745    region[2] = rect.left + rect.width;
2746    region[3] = rect.top + rect.height;
2747    if (weight > -1) {
2748        region[4] = weight;
2749    }
2750}
2751
2752/*===========================================================================
2753 * FUNCTION   : convertFromRegions
2754 *
2755 * DESCRIPTION: helper method to convert from array to cam_rect_t
2756 *
2757 * PARAMETERS :
2758 *   @rect   : cam_rect_t struct to convert
2759 *   @region : int32_t destination array
2760 *   @weight : if we are converting from cam_area_t, weight is valid
2761 *             else weight = -1
2762 *
2763 *==========================================================================*/
2764void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2765                                                   const camera_metadata_t *settings,
2766                                                   uint32_t tag){
2767    CameraMetadata frame_settings;
2768    frame_settings = settings;
2769    int32_t x_min = frame_settings.find(tag).data.i32[0];
2770    int32_t y_min = frame_settings.find(tag).data.i32[1];
2771    int32_t x_max = frame_settings.find(tag).data.i32[2];
2772    int32_t y_max = frame_settings.find(tag).data.i32[3];
2773    roi->weight = frame_settings.find(tag).data.i32[4];
2774    roi->rect.left = x_min;
2775    roi->rect.top = y_min;
2776    roi->rect.width = x_max - x_min;
2777    roi->rect.height = y_max - y_min;
2778}
2779
2780/*===========================================================================
2781 * FUNCTION   : resetIfNeededROI
2782 *
2783 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2784 *              crop region
2785 *
2786 * PARAMETERS :
2787 *   @roi       : cam_area_t struct to resize
2788 *   @scalerCropRegion : cam_crop_region_t region to compare against
2789 *
2790 *
2791 *==========================================================================*/
2792bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2793                                                 const cam_crop_region_t* scalerCropRegion)
2794{
2795    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2796    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2797    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2798    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2799    if ((roi_x_max < scalerCropRegion->left) ||
2800        (roi_y_max < scalerCropRegion->top)  ||
2801        (roi->rect.left > crop_x_max) ||
2802        (roi->rect.top > crop_y_max)){
2803        return false;
2804    }
2805    if (roi->rect.left < scalerCropRegion->left) {
2806        roi->rect.left = scalerCropRegion->left;
2807    }
2808    if (roi->rect.top < scalerCropRegion->top) {
2809        roi->rect.top = scalerCropRegion->top;
2810    }
2811    if (roi_x_max > crop_x_max) {
2812        roi_x_max = crop_x_max;
2813    }
2814    if (roi_y_max > crop_y_max) {
2815        roi_y_max = crop_y_max;
2816    }
2817    roi->rect.width = roi_x_max - roi->rect.left;
2818    roi->rect.height = roi_y_max - roi->rect.top;
2819    return true;
2820}
2821
2822/*===========================================================================
2823 * FUNCTION   : convertLandmarks
2824 *
2825 * DESCRIPTION: helper method to extract the landmarks from face detection info
2826 *
2827 * PARAMETERS :
2828 *   @face   : cam_rect_t struct to convert
2829 *   @landmarks : int32_t destination array
2830 *
2831 *
2832 *==========================================================================*/
2833void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2834{
2835    landmarks[0] = face.left_eye_center.x;
2836    landmarks[1] = face.left_eye_center.y;
2837    landmarks[2] = face.right_eye_center.x;
2838    landmarks[3] = face.right_eye_center.y;
2839    landmarks[4] = face.mouth_center.x;
2840    landmarks[5] = face.mouth_center.y;
2841}
2842
2843#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2844/*===========================================================================
2845 * FUNCTION   : initCapabilities
2846 *
2847 * DESCRIPTION: initialize camera capabilities in static data struct
2848 *
2849 * PARAMETERS :
2850 *   @cameraId  : camera Id
2851 *
2852 * RETURN     : int32_t type of status
2853 *              NO_ERROR  -- success
2854 *              none-zero failure code
2855 *==========================================================================*/
2856int QCamera3HardwareInterface::initCapabilities(int cameraId)
2857{
2858    int rc = 0;
2859    mm_camera_vtbl_t *cameraHandle = NULL;
2860    QCamera3HeapMemory *capabilityHeap = NULL;
2861
2862    cameraHandle = camera_open(cameraId);
2863    if (!cameraHandle) {
2864        ALOGE("%s: camera_open failed", __func__);
2865        rc = -1;
2866        goto open_failed;
2867    }
2868
2869    capabilityHeap = new QCamera3HeapMemory();
2870    if (capabilityHeap == NULL) {
2871        ALOGE("%s: creation of capabilityHeap failed", __func__);
2872        goto heap_creation_failed;
2873    }
2874    /* Allocate memory for capability buffer */
2875    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2876    if(rc != OK) {
2877        ALOGE("%s: No memory for cappability", __func__);
2878        goto allocate_failed;
2879    }
2880
2881    /* Map memory for capability buffer */
2882    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2883    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2884                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2885                                capabilityHeap->getFd(0),
2886                                sizeof(cam_capability_t));
2887    if(rc < 0) {
2888        ALOGE("%s: failed to map capability buffer", __func__);
2889        goto map_failed;
2890    }
2891
2892    /* Query Capability */
2893    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2894    if(rc < 0) {
2895        ALOGE("%s: failed to query capability",__func__);
2896        goto query_failed;
2897    }
2898    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2899    if (!gCamCapability[cameraId]) {
2900        ALOGE("%s: out of memory", __func__);
2901        goto query_failed;
2902    }
2903    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2904                                        sizeof(cam_capability_t));
2905    rc = 0;
2906
2907query_failed:
2908    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2909                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2910map_failed:
2911    capabilityHeap->deallocate();
2912allocate_failed:
2913    delete capabilityHeap;
2914heap_creation_failed:
2915    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2916    cameraHandle = NULL;
2917open_failed:
2918    return rc;
2919}
2920
2921/*===========================================================================
2922 * FUNCTION   : initParameters
2923 *
2924 * DESCRIPTION: initialize camera parameters
2925 *
2926 * PARAMETERS :
2927 *
2928 * RETURN     : int32_t type of status
2929 *              NO_ERROR  -- success
2930 *              none-zero failure code
2931 *==========================================================================*/
2932int QCamera3HardwareInterface::initParameters()
2933{
2934    int rc = 0;
2935
2936    //Allocate Set Param Buffer
2937    mParamHeap = new QCamera3HeapMemory();
2938    rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
2939    if(rc != OK) {
2940        rc = NO_MEMORY;
2941        ALOGE("Failed to allocate SETPARM Heap memory");
2942        delete mParamHeap;
2943        mParamHeap = NULL;
2944        return rc;
2945    }
2946
2947    //Map memory for parameters buffer
2948    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2949            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2950            mParamHeap->getFd(0),
2951            sizeof(metadata_buffer_t));
2952    if(rc < 0) {
2953        ALOGE("%s:failed to map SETPARM buffer",__func__);
2954        rc = FAILED_TRANSACTION;
2955        mParamHeap->deallocate();
2956        delete mParamHeap;
2957        mParamHeap = NULL;
2958        return rc;
2959    }
2960
2961    mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
2962    return rc;
2963}
2964
2965/*===========================================================================
2966 * FUNCTION   : deinitParameters
2967 *
2968 * DESCRIPTION: de-initialize camera parameters
2969 *
2970 * PARAMETERS :
2971 *
2972 * RETURN     : NONE
2973 *==========================================================================*/
2974void QCamera3HardwareInterface::deinitParameters()
2975{
2976    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2977            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2978
2979    mParamHeap->deallocate();
2980    delete mParamHeap;
2981    mParamHeap = NULL;
2982
2983    mParameters = NULL;
2984}
2985
2986/*===========================================================================
2987 * FUNCTION   : calcMaxJpegSize
2988 *
2989 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2990 *
2991 * PARAMETERS :
2992 *
2993 * RETURN     : max_jpeg_size
2994 *==========================================================================*/
2995int QCamera3HardwareInterface::calcMaxJpegSize()
2996{
2997    int32_t max_jpeg_size = 0;
2998    int temp_width, temp_height;
2999    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
3000        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
3001        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
3002        if (temp_width * temp_height > max_jpeg_size ) {
3003            max_jpeg_size = temp_width * temp_height;
3004        }
3005    }
3006    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
3007    return max_jpeg_size;
3008}
3009
3010/*===========================================================================
3011 * FUNCTION   : initStaticMetadata
3012 *
3013 * DESCRIPTION: initialize the static metadata
3014 *
3015 * PARAMETERS :
3016 *   @cameraId  : camera Id
3017 *
3018 * RETURN     : int32_t type of status
3019 *              0  -- success
3020 *              non-zero failure code
3021 *==========================================================================*/
3022int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
3023{
3024    int rc = 0;
3025    CameraMetadata staticInfo;
3026
3027    /* android.info: hardware level */
3028    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
3029    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
3030        &supportedHardwareLevel, 1);
3031
3032    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
3033    /*HAL 3 only*/
3034    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
3035                    &gCamCapability[cameraId]->min_focus_distance, 1);
3036
3037    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
3038                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
3039
3040    /*should be using focal lengths but sensor doesn't provide that info now*/
3041    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
3042                      &gCamCapability[cameraId]->focal_length,
3043                      1);
3044
3045    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
3046                      gCamCapability[cameraId]->apertures,
3047                      gCamCapability[cameraId]->apertures_count);
3048
3049    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
3050                gCamCapability[cameraId]->filter_densities,
3051                gCamCapability[cameraId]->filter_densities_count);
3052
3053
3054    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
3055                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
3056                      gCamCapability[cameraId]->optical_stab_modes_count);
3057
3058    staticInfo.update(ANDROID_LENS_POSITION,
3059                      gCamCapability[cameraId]->lens_position,
3060                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
3061
3062    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
3063                                       gCamCapability[cameraId]->lens_shading_map_size.height};
3064    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
3065                      lens_shading_map_size,
3066                      sizeof(lens_shading_map_size)/sizeof(int32_t));
3067
3068    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
3069            gCamCapability[cameraId]->sensor_physical_size, 2);
3070
3071    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
3072            gCamCapability[cameraId]->exposure_time_range, 2);
3073
3074    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
3075            &gCamCapability[cameraId]->max_frame_duration, 1);
3076
3077    camera_metadata_rational baseGainFactor = {
3078            gCamCapability[cameraId]->base_gain_factor.numerator,
3079            gCamCapability[cameraId]->base_gain_factor.denominator};
3080    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
3081                      &baseGainFactor, 1);
3082
3083    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
3084                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
3085
3086    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
3087                                  gCamCapability[cameraId]->pixel_array_size.height};
3088    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
3089                      pixel_array_size, 2);
3090
3091    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
3092                                                gCamCapability[cameraId]->active_array_size.top,
3093                                                gCamCapability[cameraId]->active_array_size.width,
3094                                                gCamCapability[cameraId]->active_array_size.height};
3095    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
3096                      active_array_size, 4);
3097
3098    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
3099            &gCamCapability[cameraId]->white_level, 1);
3100
3101    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
3102            gCamCapability[cameraId]->black_level_pattern, 4);
3103
3104    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
3105                      &gCamCapability[cameraId]->flash_charge_duration, 1);
3106
3107    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
3108                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
3109
3110    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
3111    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
3112                      (int32_t*)&maxFaces, 1);
3113
3114    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
3115                      &gCamCapability[cameraId]->histogram_size, 1);
3116
3117    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
3118            &gCamCapability[cameraId]->max_histogram_count, 1);
3119
3120    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
3121                                    gCamCapability[cameraId]->sharpness_map_size.height};
3122
3123    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
3124            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
3125
3126    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
3127            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
3128
3129    int32_t scalar_formats[] = {
3130            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
3131            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
3132            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
3133            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
3134            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
3135    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
3136    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
3137                      scalar_formats,
3138                      scalar_formats_count);
3139
3140    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
3141    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
3142              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
3143              available_processed_sizes);
3144    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
3145                available_processed_sizes,
3146                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
3147
3148    int32_t available_raw_sizes[CAM_FORMAT_MAX * 2];
3149    makeTable(gCamCapability[cameraId]->raw_dim,
3150              gCamCapability[cameraId]->supported_raw_dim_cnt,
3151              available_raw_sizes);
3152    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
3153                available_raw_sizes,
3154                gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
3155
3156    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
3157    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
3158                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
3159                 available_fps_ranges);
3160    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3161            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
3162
3163    camera_metadata_rational exposureCompensationStep = {
3164            gCamCapability[cameraId]->exp_compensation_step.numerator,
3165            gCamCapability[cameraId]->exp_compensation_step.denominator};
3166    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
3167                      &exposureCompensationStep, 1);
3168
3169    /*TO DO*/
3170    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
3171    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3172                      availableVstabModes, sizeof(availableVstabModes));
3173
3174    /** Quirk for urgent 3A state until final interface is worked out */
3175    uint8_t usePartialResultQuirk = 1;
3176    staticInfo.update(ANDROID_QUIRKS_USE_PARTIAL_RESULT,
3177                      &usePartialResultQuirk, 1);
3178
3179    /*HAL 1 and HAL 3 common*/
3180    float maxZoom = 4;
3181    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3182            &maxZoom, 1);
3183
3184    int32_t max3aRegions[] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
3185    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
3186            max3aRegions, 3);
3187
3188    uint8_t availableFaceDetectModes[] = {
3189            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
3190            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
3191    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3192                      availableFaceDetectModes,
3193                      sizeof(availableFaceDetectModes));
3194
3195    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
3196                                           gCamCapability[cameraId]->exposure_compensation_max};
3197    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
3198            exposureCompensationRange,
3199            sizeof(exposureCompensationRange)/sizeof(int32_t));
3200
3201    uint8_t lensFacing = (facingBack) ?
3202            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
3203    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
3204
3205    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
3206                available_processed_sizes,
3207                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
3208
3209    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3210                      available_thumbnail_sizes,
3211                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
3212
3213    /*android.scaler.availableStreamConfigurations*/
3214    int32_t max_stream_configs_size =
3215            gCamCapability[cameraId]->picture_sizes_tbl_cnt *
3216            sizeof(scalar_formats)/sizeof(int32_t) * 4;
3217    int32_t available_stream_configs[max_stream_configs_size];
3218    int idx = 0;
3219    for (int j = 0; j < scalar_formats_count; j++) {
3220        switch (scalar_formats[j]) {
3221        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
3222        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
3223            for (int i = 0;
3224                i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
3225                available_stream_configs[idx] = scalar_formats[j];
3226                available_stream_configs[idx+1] =
3227                    gCamCapability[cameraId]->raw_dim[i].width;
3228                available_stream_configs[idx+2] =
3229                    gCamCapability[cameraId]->raw_dim[i].height;
3230                available_stream_configs[idx+3] =
3231                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
3232                idx+=4;
3233            }
3234            break;
3235        default:
3236            for (int i = 0;
3237                i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3238                available_stream_configs[idx] = scalar_formats[j];
3239                available_stream_configs[idx+1] =
3240                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3241                available_stream_configs[idx+2] =
3242                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3243                available_stream_configs[idx+3] =
3244                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
3245                idx+=4;
3246            }
3247
3248
3249            break;
3250        }
3251    }
3252    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3253                      available_stream_configs, idx);
3254
3255    /* android.scaler.availableMinFrameDurations */
3256    int64_t available_min_durations[max_stream_configs_size];
3257    idx = 0;
3258    for (int j = 0; j < scalar_formats_count; j++) {
3259        switch (scalar_formats[j]) {
3260        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
3261        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
3262            for (int i = 0;
3263                i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
3264                available_min_durations[idx] = scalar_formats[j];
3265                available_min_durations[idx+1] =
3266                    gCamCapability[cameraId]->raw_dim[i].width;
3267                available_min_durations[idx+2] =
3268                    gCamCapability[cameraId]->raw_dim[i].height;
3269                available_min_durations[idx+3] =
3270                    gCamCapability[cameraId]->raw_min_duration[i];
3271                idx+=4;
3272            }
3273            break;
3274        default:
3275            for (int i = 0;
3276                i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3277                available_min_durations[idx] = scalar_formats[j];
3278                available_min_durations[idx+1] =
3279                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3280                available_min_durations[idx+2] =
3281                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3282                available_min_durations[idx+3] =
3283                    gCamCapability[cameraId]->picture_min_duration[i];
3284                idx+=4;
3285            }
3286            break;
3287        }
3288    }
3289    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
3290                      &available_min_durations[0], idx);
3291
3292    int32_t max_jpeg_size = 0;
3293    int temp_width, temp_height;
3294    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3295        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3296        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3297        if (temp_width * temp_height > max_jpeg_size ) {
3298            max_jpeg_size = temp_width * temp_height;
3299        }
3300    }
3301    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
3302    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
3303                      &max_jpeg_size, 1);
3304
3305    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
3306    size_t size = 0;
3307    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
3308        int32_t val = lookupFwkName(EFFECT_MODES_MAP,
3309                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
3310                                   gCamCapability[cameraId]->supported_effects[i]);
3311        if (val != NAME_NOT_FOUND) {
3312            avail_effects[size] = (uint8_t)val;
3313            size++;
3314        }
3315    }
3316    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
3317                      avail_effects,
3318                      size);
3319
3320    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
3321    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
3322    int32_t supported_scene_modes_cnt = 0;
3323    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
3324        int32_t val = lookupFwkName(SCENE_MODES_MAP,
3325                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3326                                gCamCapability[cameraId]->supported_scene_modes[i]);
3327        if (val != NAME_NOT_FOUND) {
3328            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
3329            supported_indexes[supported_scene_modes_cnt] = i;
3330            supported_scene_modes_cnt++;
3331        }
3332    }
3333
3334    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3335                      avail_scene_modes,
3336                      supported_scene_modes_cnt);
3337
3338    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
3339    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
3340                      supported_scene_modes_cnt,
3341                      scene_mode_overrides,
3342                      supported_indexes,
3343                      cameraId);
3344    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
3345                      scene_mode_overrides,
3346                      supported_scene_modes_cnt*3);
3347
3348    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
3349    size = 0;
3350    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
3351        int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
3352                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
3353                                 gCamCapability[cameraId]->supported_antibandings[i]);
3354        if (val != NAME_NOT_FOUND) {
3355            avail_antibanding_modes[size] = (uint8_t)val;
3356            size++;
3357        }
3358
3359    }
3360    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3361                      avail_antibanding_modes,
3362                      size);
3363
3364    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
3365    size = 0;
3366    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
3367        int32_t val = lookupFwkName(FOCUS_MODES_MAP,
3368                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3369                                gCamCapability[cameraId]->supported_focus_modes[i]);
3370        if (val != NAME_NOT_FOUND) {
3371            avail_af_modes[size] = (uint8_t)val;
3372            size++;
3373        }
3374    }
3375    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
3376                      avail_af_modes,
3377                      size);
3378
3379    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
3380    size = 0;
3381    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
3382        int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
3383                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3384                                    gCamCapability[cameraId]->supported_white_balances[i]);
3385        if (val != NAME_NOT_FOUND) {
3386            avail_awb_modes[size] = (uint8_t)val;
3387            size++;
3388        }
3389    }
3390    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
3391                      avail_awb_modes,
3392                      size);
3393
3394    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
3395    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
3396      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
3397
3398    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
3399            available_flash_levels,
3400            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
3401
3402    uint8_t flashAvailable;
3403    if (gCamCapability[cameraId]->flash_available)
3404        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
3405    else
3406        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
3407    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
3408            &flashAvailable, 1);
3409
3410    uint8_t avail_ae_modes[5];
3411    size = 0;
3412    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
3413        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
3414        size++;
3415    }
3416    if (flashAvailable) {
3417        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
3418        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
3419        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
3420    }
3421    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
3422                      avail_ae_modes,
3423                      size);
3424
3425    int32_t sensitivity_range[2];
3426    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
3427    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
3428    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
3429                      sensitivity_range,
3430                      sizeof(sensitivity_range) / sizeof(int32_t));
3431
3432    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3433                      &gCamCapability[cameraId]->max_analog_sensitivity,
3434                      1);
3435
3436    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
3437    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
3438                      &sensor_orientation,
3439                      1);
3440
3441    int32_t max_output_streams[3] = {1, 3, 1};
3442    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
3443                      max_output_streams,
3444                      3);
3445
3446    uint8_t avail_leds = 0;
3447    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
3448                      &avail_leds, 0);
3449
3450    uint8_t focus_dist_calibrated;
3451    int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
3452            sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
3453            gCamCapability[cameraId]->focus_dist_calibrated);
3454    if (val != NAME_NOT_FOUND) {
3455        focus_dist_calibrated = (uint8_t)val;
3456        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3457                     &focus_dist_calibrated, 1);
3458    }
3459
3460    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
3461    size = 0;
3462    for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
3463            i++) {
3464        int32_t val = lookupFwkName(TEST_PATTERN_MAP,
3465                                    sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
3466                                    gCamCapability[cameraId]->supported_test_pattern_modes[i]);
3467        if (val != NAME_NOT_FOUND) {
3468            avail_testpattern_modes[size] = val;
3469            size++;
3470        }
3471    }
3472    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3473                      avail_testpattern_modes,
3474                      size);
3475
3476    uint8_t max_pipeline_depth = kMaxInFlight;
3477    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
3478                      &max_pipeline_depth,
3479                      1);
3480
3481    int32_t partial_result_count = 2;
3482    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3483                      &partial_result_count,
3484                       1);
3485
3486    uint8_t available_capabilities[] =
3487        {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
3488         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
3489         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_GCAM};
3490    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3491                      available_capabilities,
3492                      3);
3493
3494    int32_t max_input_streams = 0;
3495    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3496                      &max_input_streams,
3497                      1);
3498
3499    int32_t io_format_map[] = {};
3500    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3501                      io_format_map, 0);
3502
3503    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
3504    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
3505                      &max_latency,
3506                      1);
3507
3508    float optical_axis_angle[2];
3509    optical_axis_angle[0] = 0; //need to verify
3510    optical_axis_angle[1] = 0; //need to verify
3511    staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
3512                      optical_axis_angle,
3513                      2);
3514
3515    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
3516    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3517                      available_hot_pixel_modes,
3518                      1);
3519
3520    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
3521                                      ANDROID_EDGE_MODE_FAST};
3522    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3523                      available_edge_modes,
3524                      2);
3525
3526    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
3527                                           ANDROID_NOISE_REDUCTION_MODE_FAST};
3528    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3529                      available_noise_red_modes,
3530                      2);
3531
3532    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
3533                                         ANDROID_TONEMAP_MODE_FAST};
3534    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3535                      available_tonemap_modes,
3536                      2);
3537
3538    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
3539    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3540                      available_hot_pixel_map_modes,
3541                      1);
3542
3543    uint8_t fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
3544        sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
3545        gCamCapability[cameraId]->reference_illuminant1);
3546    staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
3547                      &fwkReferenceIlluminant, 1);
3548
3549    fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
3550        sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
3551        gCamCapability[cameraId]->reference_illuminant2);
3552    staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
3553                      &fwkReferenceIlluminant, 1);
3554
3555    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1,
3556                      (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix1,
3557                      3*3);
3558
3559    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2,
3560                      (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix2,
3561                      3*3);
3562
3563    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1,
3564                   (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform1,
3565                      3*3);
3566
3567    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2,
3568                   (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform2,
3569                      3*3);
3570
3571    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
3572                   (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform1,
3573                      3*3);
3574
3575    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
3576                   (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform2,
3577                      3*3);
3578
3579
3580    int32_t available_request_keys[] = {ANDROID_COLOR_CORRECTION_MODE,
3581       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
3582       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3583       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
3584       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3585       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
3586       ANDROID_CONTROL_AF_REGIONS, ANDROID_CONTROL_AF_TRIGGER,
3587       ANDROID_CONTROL_AWB_LOCK, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3588       ANDROID_CONTROL_CAPTURE_INTENT, ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
3589       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
3590       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
3591       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3592       ANDROID_JPEG_GPS_COORDINATES,
3593       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
3594       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
3595       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
3596       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3597       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
3598       ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
3599       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
3600       ANDROID_SENSOR_FRAME_DURATION,
3601       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
3602       ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
3603       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3604       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
3605       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3606       ANDROID_BLACK_LEVEL_LOCK };
3607    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
3608                      available_request_keys,
3609                      sizeof(available_request_keys)/sizeof(int32_t));
3610
3611    int32_t available_result_keys[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
3612       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
3613       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AF_REGIONS,
3614       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3615       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
3616       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3617       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
3618       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
3619       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
3620       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3621       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
3622       ANDROID_NOISE_REDUCTION_MODE, ANDROID_QUIRKS_PARTIAL_RESULT, ANDROID_REQUEST_ID,
3623       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
3624       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
3625       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
3626       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
3627       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3628       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
3629       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3630       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
3631       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
3632       ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
3633       ANDROID_STATISTICS_FACE_SCORES};
3634    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3635                      available_result_keys,
3636                      sizeof(available_result_keys)/sizeof(int32_t));
3637
3638    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3639       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3640       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
3641       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
3642       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3643       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3644       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
3645       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
3646       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3647       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
3648       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
3649       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
3650       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
3651       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
3652       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3653       ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
3654       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3655       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3656       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3657       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3658       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3659       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3660       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3661       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
3662       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
3663       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
3664       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
3665       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
3666       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
3667       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
3668       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
3669       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
3670       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
3671       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3672       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3673       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3674       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
3675       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
3676       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
3677       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3678       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3679       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3680       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3681       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3682       ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
3683    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
3684                      available_characteristics_keys,
3685                      sizeof(available_characteristics_keys)/sizeof(int32_t));
3686
3687    /*available stall durations depend on the hw + sw and will be different for different devices */
3688    /*have to add for raw after implementation*/
3689    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB};
3690    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
3691
3692    size_t available_stall_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt * 4;
3693    int64_t available_stall_durations[available_stall_size];
3694    idx = 0;
3695    for (uint32_t j = 0; j < stall_formats_count; j++) {
3696       for (uint32_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3697          available_stall_durations[idx]   = stall_formats[j];
3698          available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3699          available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3700          available_stall_durations[idx+3] = gCamCapability[cameraId]->stall_durations[i];
3701          idx+=4;
3702       }
3703    }
3704    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
3705                      available_stall_durations,
3706                      idx);
3707
3708    gStaticMetadata[cameraId] = staticInfo.release();
3709    return rc;
3710}
3711
3712/*===========================================================================
3713 * FUNCTION   : makeTable
3714 *
3715 * DESCRIPTION: make a table of sizes
3716 *
3717 * PARAMETERS :
3718 *
3719 *
3720 *==========================================================================*/
3721void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
3722                                          int32_t* sizeTable)
3723{
3724    int j = 0;
3725    for (int i = 0; i < size; i++) {
3726        sizeTable[j] = dimTable[i].width;
3727        sizeTable[j+1] = dimTable[i].height;
3728        j+=2;
3729    }
3730}
3731
3732/*===========================================================================
3733 * FUNCTION   : makeFPSTable
3734 *
3735 * DESCRIPTION: make a table of fps ranges
3736 *
3737 * PARAMETERS :
3738 *
3739 *==========================================================================*/
3740void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
3741                                          int32_t* fpsRangesTable)
3742{
3743    int j = 0;
3744    for (int i = 0; i < size; i++) {
3745        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
3746        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
3747        j+=2;
3748    }
3749}
3750
3751/*===========================================================================
3752 * FUNCTION   : makeOverridesList
3753 *
3754 * DESCRIPTION: make a list of scene mode overrides
3755 *
3756 * PARAMETERS :
3757 *
3758 *
3759 *==========================================================================*/
3760void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
3761                                                  uint8_t size, uint8_t* overridesList,
3762                                                  uint8_t* supported_indexes,
3763                                                  int camera_id)
3764{
3765    /*daemon will give a list of overrides for all scene modes.
3766      However we should send the fwk only the overrides for the scene modes
3767      supported by the framework*/
3768    int j = 0, index = 0, supt = 0;
3769    uint8_t focus_override;
3770    for (int i = 0; i < size; i++) {
3771        supt = 0;
3772        index = supported_indexes[i];
3773        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
3774        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
3775                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3776                                                    overridesTable[index].awb_mode);
3777        focus_override = (uint8_t)overridesTable[index].af_mode;
3778        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
3779           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
3780              supt = 1;
3781              break;
3782           }
3783        }
3784        if (supt) {
3785           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
3786                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3787                                              focus_override);
3788        } else {
3789           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
3790        }
3791        j+=3;
3792    }
3793}
3794
3795/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the backend format to a type recognized by the framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
3803 *
3804 *==========================================================================*/
3805int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
3806{
3807    int32_t halPixelFormat;
3808
3809    switch (format) {
3810    case CAM_FORMAT_YUV_420_NV12:
3811        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
3812        break;
3813    case CAM_FORMAT_YUV_420_NV21:
3814        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3815        break;
3816    case CAM_FORMAT_YUV_420_NV21_ADRENO:
3817        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
3818        break;
3819    case CAM_FORMAT_YUV_420_YV12:
3820        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
3821        break;
3822    case CAM_FORMAT_YUV_422_NV16:
3823    case CAM_FORMAT_YUV_422_NV61:
3824    default:
3825        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3826        break;
3827    }
3828    return halPixelFormat;
3829}
3830
3831/*===========================================================================
3832 * FUNCTION   : getSensorSensitivity
3833 *
3834 * DESCRIPTION: convert iso_mode to an integer value
3835 *
3836 * PARAMETERS : iso_mode : the iso_mode supported by sensor
3837 *
 * RETURN     : sensitivity supported by sensor
3839 *
3840 *==========================================================================*/
3841int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
3842{
3843    int32_t sensitivity;
3844
3845    switch (iso_mode) {
3846    case CAM_ISO_MODE_100:
3847        sensitivity = 100;
3848        break;
3849    case CAM_ISO_MODE_200:
3850        sensitivity = 200;
3851        break;
3852    case CAM_ISO_MODE_400:
3853        sensitivity = 400;
3854        break;
3855    case CAM_ISO_MODE_800:
3856        sensitivity = 800;
3857        break;
3858    case CAM_ISO_MODE_1600:
3859        sensitivity = 1600;
3860        break;
3861    default:
3862        sensitivity = -1;
3863        break;
3864    }
3865    return sensitivity;
3866}
3867
3868/*===========================================================================
3869 * FUNCTION   : AddSetMetaEntryToBatch
3870 *
3871 * DESCRIPTION: add set parameter entry into batch
3872 *
3873 * PARAMETERS :
3874 *   @p_table     : ptr to parameter buffer
3875 *   @paramType   : parameter type
3876 *   @paramLength : length of parameter value
3877 *   @paramValue  : ptr to parameter value
3878 *
3879 * RETURN     : int32_t type of status
3880 *              NO_ERROR  -- success
3881 *              none-zero failure code
3882 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetMetaEntryToBatch(metadata_buffer_t *p_table,
                                                          unsigned int paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    /* The batch table embeds a singly-linked list of valid entries, kept
     * sorted by parameter ID via the GET/SET_*_PARAM_ID macros. Insert
     * `position` into that list without disturbing the ordering. */
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING - entry is already the list head, links are correct.
    } else if (position < current){
        // New entry precedes the current head: link it in as the new head.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        /* NOTE(review): relies on the list terminating with an ID >= position
         * (e.g. a max sentinel) to avoid walking forever — confirm against
         * the metadata_buffer_t initialization. */
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Reject values larger than the fixed per-entry storage.
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    // Mark the entry valid so the backend will consume it.
    SET_PARM_VALID_BIT(paramType,p_table,1);
    return NO_ERROR;
}
3925
3926/*===========================================================================
3927 * FUNCTION   : lookupFwkName
3928 *
3929 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
3931 *
3932 * PARAMETERS  :
3933 *   @arr      : map between the two enums
3934 *   @len      : len of the map
3935 *   @hal_name : name of the hal_parm to map
3936 *
3937 * RETURN     : int type of status
3938 *              fwk_name  -- success
3939 *              none-zero failure code
3940 *==========================================================================*/
3941int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
3942                                             int len, int hal_name)
3943{
3944
3945    for (int i = 0; i < len; i++) {
3946        if (arr[i].hal_name == hal_name)
3947            return arr[i].fwk_name;
3948    }
3949
3950    /* Not able to find matching framework type is not necessarily
3951     * an error case. This happens when mm-camera supports more attributes
3952     * than the frameworks do */
3953    ALOGD("%s: Cannot find matching framework type", __func__);
3954    return NAME_NOT_FOUND;
3955}
3956
3957/*===========================================================================
3958 * FUNCTION   : lookupHalName
3959 *
3960 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
3962 *
3963 * PARAMETERS  :
3964 *   @arr      : map between the two enums
3965 *   @len      : len of the map
3966 *   @fwk_name : name of the hal_parm to map
3967 *
3968 * RETURN     : int32_t type of status
3969 *              hal_name  -- success
3970 *              none-zero failure code
3971 *==========================================================================*/
int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
                                             int len, unsigned int fwk_name)
{
    // Linear scan of the framework->HAL enum map for a matching fwk value.
    for (int i = 0; i < len; i++) {
       if (arr[i].fwk_name == fwk_name)
           // NOTE(review): return type is int8_t, so hal_name values outside
           // [-128, 127] would be silently truncated — confirm every mapped
           // hal_name fits, or widen the return type together with the
           // declaration in the header.
           return arr[i].hal_name;
    }
    // Unlike lookupFwkName, a miss here is logged as an error.
    ALOGE("%s: Cannot find matching hal type", __func__);
    return NAME_NOT_FOUND;
}
3982
3983/*===========================================================================
 * FUNCTION   : getCamInfo
3985 *
3986 * DESCRIPTION: query camera capabilities
3987 *
3988 * PARAMETERS :
3989 *   @cameraId  : camera Id
3990 *   @info      : camera info struct to be filled in with camera capabilities
3991 *
3992 * RETURN     : int32_t type of status
3993 *              NO_ERROR  -- success
3994 *              none-zero failure code
3995 *==========================================================================*/
3996int QCamera3HardwareInterface::getCamInfo(int cameraId,
3997                                    struct camera_info *info)
3998{
3999    int rc = 0;
4000
4001    if (NULL == gCamCapability[cameraId]) {
4002        rc = initCapabilities(cameraId);
4003        if (rc < 0) {
4004            //pthread_mutex_unlock(&g_camlock);
4005            return rc;
4006        }
4007    }
4008
4009    if (NULL == gStaticMetadata[cameraId]) {
4010        rc = initStaticMetadata(cameraId);
4011        if (rc < 0) {
4012            return rc;
4013        }
4014    }
4015
4016    switch(gCamCapability[cameraId]->position) {
4017    case CAM_POSITION_BACK:
4018        info->facing = CAMERA_FACING_BACK;
4019        break;
4020
4021    case CAM_POSITION_FRONT:
4022        info->facing = CAMERA_FACING_FRONT;
4023        break;
4024
4025    default:
4026        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
4027        rc = -1;
4028        break;
4029    }
4030
4031
4032    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
4033    info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
4034    info->static_camera_characteristics = gStaticMetadata[cameraId];
4035
4036    return rc;
4037}
4038
4039/*===========================================================================
4040 * FUNCTION   : translateCapabilityToMetadata
4041 *
4042 * DESCRIPTION: translate the capability into camera_metadata_t
4043 *
4044 * PARAMETERS : type of the request
4045 *
4046 *
4047 * RETURN     : success: camera_metadata_t*
4048 *              failure: NULL
4049 *
4050 *==========================================================================*/
4051camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
4052{
4053    pthread_mutex_lock(&mMutex);
4054
4055    if (mDefaultMetadata[type] != NULL) {
4056        pthread_mutex_unlock(&mMutex);
4057        return mDefaultMetadata[type];
4058    }
4059    //first time we are handling this request
4060    //fill up the metadata structure using the wrapper class
4061    CameraMetadata settings;
4062    //translate from cam_capability_t to camera_metadata_tag_t
4063    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
4064    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
4065    int32_t defaultRequestID = 0;
4066    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
4067
4068    uint8_t controlIntent = 0;
4069    uint8_t focusMode;
4070    switch (type) {
4071      case CAMERA3_TEMPLATE_PREVIEW:
4072        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
4073        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4074        break;
4075      case CAMERA3_TEMPLATE_STILL_CAPTURE:
4076        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
4077        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4078        break;
4079      case CAMERA3_TEMPLATE_VIDEO_RECORD:
4080        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
4081        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
4082        break;
4083      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
4084        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
4085        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
4086        break;
4087      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
4088        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
4089        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4090        break;
4091      case CAMERA3_TEMPLATE_MANUAL:
4092        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
4093        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
4094        break;
4095      default:
4096        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
4097        break;
4098    }
4099    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
4100
4101    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
4102        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
4103    }
4104    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
4105
4106    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
4107            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
4108
4109    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
4110    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4111
4112    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
4113    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
4114
4115    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
4116    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
4117
4118    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
4119    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
4120
4121    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
4122    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
4123
4124    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
4125    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
4126
4127    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
4128    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
4129
4130    /*flash*/
4131    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
4132    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
4133
4134    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
4135    settings.update(ANDROID_FLASH_FIRING_POWER,
4136            &flashFiringLevel, 1);
4137
4138    /* lens */
4139    float default_aperture = gCamCapability[mCameraId]->apertures[0];
4140    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
4141
4142    if (gCamCapability[mCameraId]->filter_densities_count) {
4143        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
4144        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
4145                        gCamCapability[mCameraId]->filter_densities_count);
4146    }
4147
4148    float default_focal_length = gCamCapability[mCameraId]->focal_length;
4149    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
4150
4151    float default_focus_distance = 0;
4152    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
4153
4154    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
4155    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
4156
4157    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
4158    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
4159
4160    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
4161    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
4162
4163    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
4164    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
4165
4166    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
4167    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
4168
4169    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
4170    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
4171
4172    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4173    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4174
4175    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
4176    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
4177
4178    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4179    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
4180
4181    /* Exposure time(Update the Min Exposure Time)*/
4182    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
4183    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
4184
4185    /* frame duration */
4186    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
4187    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
4188
4189    /* sensitivity */
4190    static const int32_t default_sensitivity = 100;
4191    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
4192
4193    /*edge mode*/
4194    static const uint8_t edge_mode = ANDROID_EDGE_MODE_FAST;
4195    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
4196
4197    /*noise reduction mode*/
4198    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
4199    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
4200
4201    /*color correction mode*/
4202    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
4203    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
4204
4205    /*transform matrix mode*/
4206    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
4207    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
4208
4209    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
4210    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
4211
4212    int32_t scaler_crop_region[4];
4213    scaler_crop_region[0] = 0;
4214    scaler_crop_region[1] = 0;
4215    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
4216    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
4217    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
4218
4219    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
4220    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
4221
4222    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4223    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
4224
4225    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
4226                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
4227                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
4228    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
4229
4230    /*focus distance*/
4231    float focus_distance = 0.0;
4232    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
4233
4234    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
4235    float max_range = 0.0;
4236    float max_fixed_fps = 0.0;
4237    int32_t fps_range[2] = {0, 0};
4238    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
4239            i++) {
4240        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
4241            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4242        if (type == CAMERA3_TEMPLATE_PREVIEW ||
4243                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
4244                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
4245            if (range > max_range) {
4246                fps_range[0] =
4247                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4248                fps_range[1] =
4249                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4250                max_range = range;
4251            }
4252        } else {
4253            if (range < 0.01 && max_fixed_fps <
4254                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
4255                fps_range[0] =
4256                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4257                fps_range[1] =
4258                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4259                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4260            }
4261        }
4262    }
4263    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
4264
4265    /*precapture trigger*/
4266    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
4267    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
4268
4269    /*af trigger*/
4270    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
4271    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
4272
4273    /* ae & af regions */
4274    int32_t active_region[] = {
4275            gCamCapability[mCameraId]->active_array_size.left,
4276            gCamCapability[mCameraId]->active_array_size.top,
4277            gCamCapability[mCameraId]->active_array_size.left +
4278                    gCamCapability[mCameraId]->active_array_size.width,
4279            gCamCapability[mCameraId]->active_array_size.top +
4280                    gCamCapability[mCameraId]->active_array_size.height,
4281            1};
4282    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
4283    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
4284
4285    /* black level lock */
4286    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4287    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
4288
4289    /* face detect mode */
4290    uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
4291    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
4292
4293    /* lens shading map mode */
4294    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
4295    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
4296
4297    //special defaults for manual template
4298    if (type == CAMERA3_TEMPLATE_MANUAL) {
4299        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
4300        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
4301
4302        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
4303        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
4304
4305        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
4306        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
4307
4308        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
4309        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
4310
4311        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
4312        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
4313
4314        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
4315        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
4316    }
4317    mDefaultMetadata[type] = settings.release();
4318
4319    pthread_mutex_unlock(&mMutex);
4320    return mDefaultMetadata[type];
4321}
4322
4323/*===========================================================================
4324 * FUNCTION   : setFrameParameters
4325 *
4326 * DESCRIPTION: set parameters per frame as requested in the metadata from
4327 *              framework
4328 *
4329 * PARAMETERS :
4330 *   @request   : request that needs to be serviced
4331 *   @streamID : Stream ID of all the requested streams
4332 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE or error code from metadata translation
4335 *==========================================================================*/
4336int QCamera3HardwareInterface::setFrameParameters(
4337                    camera3_capture_request_t *request,
4338                    cam_stream_ID_t streamID)
4339{
4340    /*translate from camera_metadata_t type to parm_type_t*/
4341    int rc = 0;
4342    int32_t hal_version = CAM_HAL_V3;
4343
4344    memset(mParameters, 0, sizeof(metadata_buffer_t));
4345    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
4346    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
4347                sizeof(hal_version), &hal_version);
4348    if (rc < 0) {
4349        ALOGE("%s: Failed to set hal version in the parameters", __func__);
4350        return BAD_VALUE;
4351    }
4352
4353    /*we need to update the frame number in the parameters*/
4354    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
4355                                sizeof(request->frame_number), &(request->frame_number));
4356    if (rc < 0) {
4357        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4358        return BAD_VALUE;
4359    }
4360
4361    /* Update stream id of all the requested buffers */
4362    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
4363                                sizeof(cam_stream_ID_t), &streamID);
4364
4365    if (rc < 0) {
4366        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
4367        return BAD_VALUE;
4368    }
4369
4370    if(request->settings != NULL){
4371        rc = translateToHalMetadata(request, mParameters);
4372    }
4373
4374    /*set the parameters to backend*/
4375    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
4376    return rc;
4377}
4378
4379/*===========================================================================
4380 * FUNCTION   : setReprocParameters
4381 *
4382 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
4383 *              queue it to picture channel for reprocessing.
4384 *
4385 * PARAMETERS :
4386 *   @request   : request that needs to be serviced
4387 *
4388 * RETURN     : success: NO_ERROR
4389 *              failure: non zero failure code
4390 *==========================================================================*/
4391int QCamera3HardwareInterface::setReprocParameters(
4392        camera3_capture_request_t *request)
4393{
4394    /*translate from camera_metadata_t type to parm_type_t*/
4395    int rc = 0;
4396    metadata_buffer_t *reprocParam = NULL;
4397
4398    if(request->settings != NULL){
4399        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
4400        return BAD_VALUE;
4401    }
4402    reprocParam = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
4403    if (!reprocParam) {
4404        ALOGE("%s: Failed to allocate reprocessing metadata buffer", __func__);
4405        return NO_MEMORY;
4406    }
4407    memset(reprocParam, 0, sizeof(metadata_buffer_t));
4408    reprocParam->first_flagged_entry = CAM_INTF_PARM_MAX;
4409
4410    /*we need to update the frame number in the parameters*/
4411    rc = AddSetMetaEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
4412                                sizeof(request->frame_number), &(request->frame_number));
4413    if (rc < 0) {
4414        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4415        return BAD_VALUE;
4416    }
4417
4418
4419    rc = translateToHalMetadata(request, reprocParam);
4420    if (rc < 0) {
4421        ALOGE("%s: Failed to translate reproc request", __func__);
4422        delete reprocParam;
4423        return rc;
4424    }
4425    /*queue metadata for reprocessing*/
4426    rc = mPictureChannel->queueReprocMetadata(reprocParam);
4427    if (rc < 0) {
4428        ALOGE("%s: Failed to queue reprocessing metadata", __func__);
4429        delete reprocParam;
4430    }
4431
4432    return rc;
4433}
4434
4435/*===========================================================================
4436 * FUNCTION   : translateToHalMetadata
4437 *
4438 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
4439 *
4440 *
4441 * PARAMETERS :
4442 *   @request  : request sent from framework
4443 *
4444 *
 * RETURN     : success: NO_ERROR
 *              failure: error code
4447 *==========================================================================*/
4448int QCamera3HardwareInterface::translateToHalMetadata
4449                                  (const camera3_capture_request_t *request,
4450                                   metadata_buffer_t *hal_metadata)
4451{
4452    int rc = 0;
4453    CameraMetadata frame_settings;
4454    frame_settings = request->settings;
4455
4456    /* Do not change the order of the following list unless you know what you are
4457     * doing.
4458     * The order is laid out in such a way that parameters in the front of the table
4459     * may be used to override the parameters later in the table. Examples are:
4460     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
4464     */
4465    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4466        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4467        rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_MODE,
4468                sizeof(metaMode), &metaMode);
4469        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4470           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4471           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
4472                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
4473                                             fwk_sceneMode);
4474           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4475                sizeof(sceneMode), &sceneMode);
4476        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
4477           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4478           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4479                sizeof(sceneMode), &sceneMode);
4480        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
4481           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4482           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4483                sizeof(sceneMode), &sceneMode);
4484        }
4485    }
4486
4487    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4488        uint8_t fwk_aeMode =
4489            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4490        uint8_t aeMode;
4491        int32_t redeye;
4492
4493        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
4494            aeMode = CAM_AE_MODE_OFF;
4495        } else {
4496            aeMode = CAM_AE_MODE_ON;
4497        }
4498        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
4499            redeye = 1;
4500        } else {
4501            redeye = 0;
4502        }
4503
4504        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
4505                                          sizeof(AE_FLASH_MODE_MAP),
4506                                          fwk_aeMode);
4507        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
4508                sizeof(aeMode), &aeMode);
4509        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
4510                sizeof(flashMode), &flashMode);
4511        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
4512                sizeof(redeye), &redeye);
4513    }
4514
4515    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
4516        uint8_t fwk_whiteLevel =
4517            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
4518        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
4519                sizeof(WHITE_BALANCE_MODES_MAP),
4520                fwk_whiteLevel);
4521        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
4522                sizeof(whiteLevel), &whiteLevel);
4523    }
4524
4525    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
4526        uint8_t fwk_focusMode =
4527            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
4528        uint8_t focusMode;
4529        focusMode = lookupHalName(FOCUS_MODES_MAP,
4530                                   sizeof(FOCUS_MODES_MAP),
4531                                   fwk_focusMode);
4532        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
4533                sizeof(focusMode), &focusMode);
4534    }
4535
4536    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
4537        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
4538        rc = AddSetMetaEntryToBatch(hal_metadata,
4539                CAM_INTF_META_LENS_FOCUS_DISTANCE,
4540                sizeof(focalDistance), &focalDistance);
4541    }
4542
4543    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
4544        int32_t antibandingMode =
4545            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
4546        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
4547                sizeof(antibandingMode), &antibandingMode);
4548    }
4549
4550    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4551        int32_t expCompensation = frame_settings.find(
4552            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4553        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4554            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4555        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4556            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4557        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
4558          sizeof(expCompensation), &expCompensation);
4559    }
4560
4561    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4562        int32_t expCompensation = frame_settings.find(
4563            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4564        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4565            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4566        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4567            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4568        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
4569          sizeof(expCompensation), &expCompensation);
4570    }
4571
4572    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
4573        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
4574        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
4575                sizeof(aeLock), &aeLock);
4576    }
4577    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4578        cam_fps_range_t fps_range;
4579        fps_range.min_fps =
4580            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
4581        fps_range.max_fps =
4582            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
4583        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
4584                sizeof(fps_range), &fps_range);
4585    }
4586
4587    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
4588        uint8_t awbLock =
4589            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
4590        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
4591                sizeof(awbLock), &awbLock);
4592    }
4593
4594    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
4595        uint8_t fwk_effectMode =
4596            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
4597        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
4598                sizeof(EFFECT_MODES_MAP),
4599                fwk_effectMode);
4600        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
4601                sizeof(effectMode), &effectMode);
4602    }
4603
4604    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
4605        uint8_t colorCorrectMode =
4606            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
4607        rc =
4608            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
4609                    sizeof(colorCorrectMode), &colorCorrectMode);
4610    }
4611
4612    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
4613        cam_color_correct_gains_t colorCorrectGains;
4614        for (int i = 0; i < 4; i++) {
4615            colorCorrectGains.gains[i] =
4616                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
4617        }
4618        rc =
4619            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
4620                    sizeof(colorCorrectGains), &colorCorrectGains);
4621    }
4622
4623    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
4624        cam_color_correct_matrix_t colorCorrectTransform;
4625        cam_rational_type_t transform_elem;
4626        int num = 0;
4627        for (int i = 0; i < 3; i++) {
4628           for (int j = 0; j < 3; j++) {
4629              transform_elem.numerator =
4630                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
4631              transform_elem.denominator =
4632                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
4633              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
4634              num++;
4635           }
4636        }
4637        rc =
4638            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
4639                    sizeof(colorCorrectTransform), &colorCorrectTransform);
4640    }
4641
4642    cam_trigger_t aecTrigger;
4643    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
4644    aecTrigger.trigger_id = -1;
4645    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
4646        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
4647        aecTrigger.trigger =
4648            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
4649        aecTrigger.trigger_id =
4650            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
4651        rc = AddSetMetaEntryToBatch(hal_metadata,
4652                CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
4653                sizeof(aecTrigger), &aecTrigger);
4654    }
4655    /*af_trigger must come with a trigger id*/
4656    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
4657        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
4658        cam_trigger_t af_trigger;
4659        af_trigger.trigger =
4660            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
4661        af_trigger.trigger_id =
4662            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
4663        rc = AddSetMetaEntryToBatch(hal_metadata,
4664                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
4665    }
4666
4667    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
4668        int32_t demosaic =
4669            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
4670        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
4671                sizeof(demosaic), &demosaic);
4672    }
4673
4674    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
4675        cam_edge_application_t edge_application;
4676        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
4677        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
4678            edge_application.sharpness = 0;
4679        } else {
4680            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
4681                uint8_t edgeStrength =
4682                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
4683                edge_application.sharpness = (int32_t)edgeStrength;
4684            } else {
4685                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
4686            }
4687        }
4688        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
4689                sizeof(edge_application), &edge_application);
4690    }
4691
4692    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
4693        int32_t respectFlashMode = 1;
4694        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4695            uint8_t fwk_aeMode =
4696                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4697            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
4698                respectFlashMode = 0;
4699                ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
4700                    __func__);
4701            }
4702        }
4703        if (respectFlashMode) {
4704            uint8_t flashMode =
4705                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
4706            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
4707                                          sizeof(FLASH_MODES_MAP),
4708                                          flashMode);
4709            ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
4710            // To check: CAM_INTF_META_FLASH_MODE usage
4711            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
4712                          sizeof(flashMode), &flashMode);
4713        }
4714    }
4715
4716    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
4717        uint8_t flashPower =
4718            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
4719        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
4720                sizeof(flashPower), &flashPower);
4721    }
4722
4723    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
4724        int64_t flashFiringTime =
4725            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
4726        rc = AddSetMetaEntryToBatch(hal_metadata,
4727                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
4728    }
4729
4730    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
4731        uint8_t hotPixelMode =
4732            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
4733        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
4734                sizeof(hotPixelMode), &hotPixelMode);
4735    }
4736
4737    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
4738        float lensAperture =
4739            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
4740        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
4741                sizeof(lensAperture), &lensAperture);
4742    }
4743
4744    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
4745        float filterDensity =
4746            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
4747        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
4748                sizeof(filterDensity), &filterDensity);
4749    }
4750
4751    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
4752        float focalLength =
4753            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
4754        rc = AddSetMetaEntryToBatch(hal_metadata,
4755                CAM_INTF_META_LENS_FOCAL_LENGTH,
4756                sizeof(focalLength), &focalLength);
4757    }
4758
4759    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
4760        uint8_t optStabMode =
4761            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
4762        rc = AddSetMetaEntryToBatch(hal_metadata,
4763                CAM_INTF_META_LENS_OPT_STAB_MODE,
4764                sizeof(optStabMode), &optStabMode);
4765    }
4766
4767    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4768        uint8_t noiseRedMode =
4769            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4770        rc = AddSetMetaEntryToBatch(hal_metadata,
4771                CAM_INTF_META_NOISE_REDUCTION_MODE,
4772                sizeof(noiseRedMode), &noiseRedMode);
4773    }
4774
4775    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
4776        uint8_t noiseRedStrength =
4777            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
4778        rc = AddSetMetaEntryToBatch(hal_metadata,
4779                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
4780                sizeof(noiseRedStrength), &noiseRedStrength);
4781    }
4782
4783    cam_crop_region_t scalerCropRegion;
4784    bool scalerCropSet = false;
4785    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
4786        scalerCropRegion.left =
4787            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
4788        scalerCropRegion.top =
4789            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
4790        scalerCropRegion.width =
4791            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
4792        scalerCropRegion.height =
4793            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
4794        rc = AddSetMetaEntryToBatch(hal_metadata,
4795                CAM_INTF_META_SCALER_CROP_REGION,
4796                sizeof(scalerCropRegion), &scalerCropRegion);
4797        scalerCropSet = true;
4798    }
4799
4800    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
4801        int64_t sensorExpTime =
4802            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
4803        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
4804        rc = AddSetMetaEntryToBatch(hal_metadata,
4805                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
4806                sizeof(sensorExpTime), &sensorExpTime);
4807    }
4808
4809    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
4810        int64_t sensorFrameDuration =
4811            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
4812        int64_t minFrameDuration = getMinFrameDuration(request);
4813        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
4814        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
4815            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
4816        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
4817        rc = AddSetMetaEntryToBatch(hal_metadata,
4818                CAM_INTF_META_SENSOR_FRAME_DURATION,
4819                sizeof(sensorFrameDuration), &sensorFrameDuration);
4820    }
4821
4822    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
4823        int32_t sensorSensitivity =
4824            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
4825        if (sensorSensitivity <
4826                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
4827            sensorSensitivity =
4828                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
4829        if (sensorSensitivity >
4830                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
4831            sensorSensitivity =
4832                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
4833        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
4834        rc = AddSetMetaEntryToBatch(hal_metadata,
4835                CAM_INTF_META_SENSOR_SENSITIVITY,
4836                sizeof(sensorSensitivity), &sensorSensitivity);
4837    }
4838
4839    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
4840        int32_t shadingMode =
4841            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
4842        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
4843                sizeof(shadingMode), &shadingMode);
4844    }
4845
4846    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
4847        uint8_t shadingStrength =
4848            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
4849        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
4850                sizeof(shadingStrength), &shadingStrength);
4851    }
4852
4853    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
4854        uint8_t fwk_facedetectMode =
4855            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
4856        uint8_t facedetectMode =
4857            lookupHalName(FACEDETECT_MODES_MAP,
4858                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
4859        rc = AddSetMetaEntryToBatch(hal_metadata,
4860                CAM_INTF_META_STATS_FACEDETECT_MODE,
4861                sizeof(facedetectMode), &facedetectMode);
4862    }
4863
4864    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
4865        uint8_t histogramMode =
4866            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
4867        rc = AddSetMetaEntryToBatch(hal_metadata,
4868                CAM_INTF_META_STATS_HISTOGRAM_MODE,
4869                sizeof(histogramMode), &histogramMode);
4870    }
4871
4872    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
4873        uint8_t sharpnessMapMode =
4874            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
4875        rc = AddSetMetaEntryToBatch(hal_metadata,
4876                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
4877                sizeof(sharpnessMapMode), &sharpnessMapMode);
4878    }
4879
4880    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
4881        uint8_t tonemapMode =
4882            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
4883        rc = AddSetMetaEntryToBatch(hal_metadata,
4884                CAM_INTF_META_TONEMAP_MODE,
4885                sizeof(tonemapMode), &tonemapMode);
4886    }
4887    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
4888    /*All tonemap channels will have the same number of points*/
4889    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
4890        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
4891        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
4892        cam_rgb_tonemap_curves tonemapCurves;
4893        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
4894
4895        /* ch0 = G*/
4896        int point = 0;
4897        cam_tonemap_curve_t tonemapCurveGreen;
4898        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
4899            for (int j = 0; j < 2; j++) {
4900               tonemapCurveGreen.tonemap_points[i][j] =
4901                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
4902               point++;
4903            }
4904        }
4905        tonemapCurves.curves[0] = tonemapCurveGreen;
4906
4907        /* ch 1 = B */
4908        point = 0;
4909        cam_tonemap_curve_t tonemapCurveBlue;
4910        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
4911            for (int j = 0; j < 2; j++) {
4912               tonemapCurveBlue.tonemap_points[i][j] =
4913                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
4914               point++;
4915            }
4916        }
4917        tonemapCurves.curves[1] = tonemapCurveBlue;
4918
4919        /* ch 2 = R */
4920        point = 0;
4921        cam_tonemap_curve_t tonemapCurveRed;
4922        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
4923            for (int j = 0; j < 2; j++) {
4924               tonemapCurveRed.tonemap_points[i][j] =
4925                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
4926               point++;
4927            }
4928        }
4929        tonemapCurves.curves[2] = tonemapCurveRed;
4930
4931        rc = AddSetMetaEntryToBatch(hal_metadata,
4932                CAM_INTF_META_TONEMAP_CURVES,
4933                sizeof(tonemapCurves), &tonemapCurves);
4934    }
4935
4936    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4937        uint8_t captureIntent =
4938            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4939        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
4940                sizeof(captureIntent), &captureIntent);
4941    }
4942
4943    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
4944        uint8_t blackLevelLock =
4945            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
4946        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
4947                sizeof(blackLevelLock), &blackLevelLock);
4948    }
4949
4950    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
4951        uint8_t lensShadingMapMode =
4952            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
4953        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
4954                sizeof(lensShadingMapMode), &lensShadingMapMode);
4955    }
4956
4957    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
4958        cam_area_t roi;
4959        bool reset = true;
4960        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
4961        if (scalerCropSet) {
4962            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4963        }
4964        if (reset) {
4965            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
4966                    sizeof(roi), &roi);
4967        }
4968    }
4969
4970    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
4971        cam_area_t roi;
4972        bool reset = true;
4973        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
4974        if (scalerCropSet) {
4975            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4976        }
4977        if (reset) {
4978            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
4979                    sizeof(roi), &roi);
4980        }
4981    }
4982
4983    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
4984        cam_area_t roi;
4985        bool reset = true;
4986        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
4987        if (scalerCropSet) {
4988            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4989        }
4990        if (reset) {
4991            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AWB_REGIONS,
4992                    sizeof(roi), &roi);
4993        }
4994    }
4995
4996    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
4997        cam_test_pattern_data_t testPatternData;
4998        uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
4999        uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
5000               sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
5001
5002        memset(&testPatternData, 0, sizeof(testPatternData));
5003        testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
5004        if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
5005                frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
5006            int32_t* fwk_testPatternData = frame_settings.find(
5007                    ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
5008            testPatternData.r = fwk_testPatternData[0];
5009            testPatternData.b = fwk_testPatternData[3];
5010            switch (gCamCapability[mCameraId]->color_arrangement) {
5011            case CAM_FILTER_ARRANGEMENT_RGGB:
5012            case CAM_FILTER_ARRANGEMENT_GRBG:
5013                testPatternData.gr = fwk_testPatternData[1];
5014                testPatternData.gb = fwk_testPatternData[2];
5015                break;
5016            case CAM_FILTER_ARRANGEMENT_GBRG:
5017            case CAM_FILTER_ARRANGEMENT_BGGR:
5018                testPatternData.gr = fwk_testPatternData[2];
5019                testPatternData.gb = fwk_testPatternData[1];
5020                break;
5021            default:
5022                ALOGE("%s: color arrangement %d is not supported", __func__,
5023                    gCamCapability[mCameraId]->color_arrangement);
5024                break;
5025            }
5026        }
5027        rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_TEST_PATTERN_DATA,
5028            sizeof(testPatternData), &testPatternData);
5029    }
5030
5031    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
5032        double *gps_coords =
5033            frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
5034        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
5035    }
5036
5037    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
5038        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
5039        const char *gps_methods_src = (const char *)
5040                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
5041        uint32_t count = frame_settings.find(
5042                ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
5043        memset(gps_methods, 0, sizeof(gps_methods));
5044        strncpy(gps_methods, gps_methods_src, sizeof(gps_methods));
5045        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
5046    }
5047
5048    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
5049        int64_t gps_timestamp =
5050            frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
5051        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
5052    }
5053
5054    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5055        int32_t orientation =
5056            frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5057        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
5058    }
5059
5060    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
5061        int8_t quality =
5062            frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
5063        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
5064    }
5065
5066    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
5067        int8_t thumb_quality =
5068            frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
5069        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
5070    }
5071
5072    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5073        cam_dimension_t dim;
5074        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5075        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5076        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
5077    }
5078
5079    // Internal metadata
5080    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
5081        uint8_t* privatedata =
5082            frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.u8;
5083        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
5084            sizeof(uint8_t) * MAX_METADATA_PAYLOAD_SIZE, privatedata);
5085    }
5086
5087    // EV step
5088    rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
5089            sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
5090
5091    return rc;
5092}
5093
5094/*===========================================================================
5095 * FUNCTION   : captureResultCb
5096 *
5097 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
5098 *
5099 * PARAMETERS :
5100 *   @frame  : frame information from mm-camera-interface
5101 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
5102 *   @userdata: userdata
5103 *
5104 * RETURN     : NONE
5105 *==========================================================================*/
5106void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
5107                camera3_stream_buffer_t *buffer,
5108                uint32_t frame_number, void *userdata)
5109{
5110    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
5111    if (hw == NULL) {
5112        ALOGE("%s: Invalid hw %p", __func__, hw);
5113        return;
5114    }
5115
5116    hw->captureResultCb(metadata, buffer, frame_number);
5117    return;
5118}
5119
5120
5121/*===========================================================================
5122 * FUNCTION   : initialize
5123 *
5124 * DESCRIPTION: Pass framework callback pointers to HAL
5125 *
5126 * PARAMETERS :
5127 *
5128 *
5129 * RETURN     : Success : 0
5130 *              Failure: -ENODEV
5131 *==========================================================================*/
5132
5133int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
5134                                  const camera3_callback_ops_t *callback_ops)
5135{
5136    ALOGV("%s: E", __func__);
5137    QCamera3HardwareInterface *hw =
5138        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5139    if (!hw) {
5140        ALOGE("%s: NULL camera device", __func__);
5141        return -ENODEV;
5142    }
5143
5144    int rc = hw->initialize(callback_ops);
5145    ALOGV("%s: X", __func__);
5146    return rc;
5147}
5148
5149/*===========================================================================
5150 * FUNCTION   : configure_streams
5151 *
 * DESCRIPTION: Configure the HAL for the stream set requested by the framework
5153 *
5154 * PARAMETERS :
5155 *
5156 *
5157 * RETURN     : Success: 0
5158 *              Failure: -EINVAL (if stream configuration is invalid)
5159 *                       -ENODEV (fatal error)
5160 *==========================================================================*/
5161
5162int QCamera3HardwareInterface::configure_streams(
5163        const struct camera3_device *device,
5164        camera3_stream_configuration_t *stream_list)
5165{
5166    ALOGV("%s: E", __func__);
5167    QCamera3HardwareInterface *hw =
5168        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5169    if (!hw) {
5170        ALOGE("%s: NULL camera device", __func__);
5171        return -ENODEV;
5172    }
5173    int rc = hw->configureStreams(stream_list);
5174    ALOGV("%s: X", __func__);
5175    return rc;
5176}
5177
5178/*===========================================================================
5179 * FUNCTION   : register_stream_buffers
5180 *
5181 * DESCRIPTION: Register stream buffers with the device
5182 *
5183 * PARAMETERS :
5184 *
5185 * RETURN     :
5186 *==========================================================================*/
5187int QCamera3HardwareInterface::register_stream_buffers(
5188        const struct camera3_device *device,
5189        const camera3_stream_buffer_set_t *buffer_set)
5190{
5191    ALOGV("%s: E", __func__);
5192    QCamera3HardwareInterface *hw =
5193        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5194    if (!hw) {
5195        ALOGE("%s: NULL camera device", __func__);
5196        return -ENODEV;
5197    }
5198    int rc = hw->registerStreamBuffers(buffer_set);
5199    ALOGV("%s: X", __func__);
5200    return rc;
5201}
5202
5203/*===========================================================================
5204 * FUNCTION   : construct_default_request_settings
5205 *
5206 * DESCRIPTION: Configure a settings buffer to meet the required use case
5207 *
5208 * PARAMETERS :
5209 *
5210 *
5211 * RETURN     : Success: Return valid metadata
5212 *              Failure: Return NULL
5213 *==========================================================================*/
5214const camera_metadata_t* QCamera3HardwareInterface::
5215    construct_default_request_settings(const struct camera3_device *device,
5216                                        int type)
5217{
5218
5219    ALOGV("%s: E", __func__);
5220    camera_metadata_t* fwk_metadata = NULL;
5221    QCamera3HardwareInterface *hw =
5222        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5223    if (!hw) {
5224        ALOGE("%s: NULL camera device", __func__);
5225        return NULL;
5226    }
5227
5228    fwk_metadata = hw->translateCapabilityToMetadata(type);
5229
5230    ALOGV("%s: X", __func__);
5231    return fwk_metadata;
5232}
5233
5234/*===========================================================================
5235 * FUNCTION   : process_capture_request
5236 *
 * DESCRIPTION: Validate and submit a single capture request to the HAL
5238 *
5239 * PARAMETERS :
5240 *
5241 *
5242 * RETURN     :
5243 *==========================================================================*/
5244int QCamera3HardwareInterface::process_capture_request(
5245                    const struct camera3_device *device,
5246                    camera3_capture_request_t *request)
5247{
5248    ALOGV("%s: E", __func__);
5249    QCamera3HardwareInterface *hw =
5250        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5251    if (!hw) {
5252        ALOGE("%s: NULL camera device", __func__);
5253        return -EINVAL;
5254    }
5255
5256    int rc = hw->processCaptureRequest(request);
5257    ALOGV("%s: X", __func__);
5258    return rc;
5259}
5260
5261/*===========================================================================
5262 * FUNCTION   : dump
5263 *
5264 * DESCRIPTION:
5265 *
5266 * PARAMETERS :
5267 *
5268 *
5269 * RETURN     :
5270 *==========================================================================*/
5271
5272void QCamera3HardwareInterface::dump(
5273                const struct camera3_device *device, int fd)
5274{
5275    ALOGV("%s: E", __func__);
5276    QCamera3HardwareInterface *hw =
5277        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5278    if (!hw) {
5279        ALOGE("%s: NULL camera device", __func__);
5280        return;
5281    }
5282
5283    hw->dump(fd);
5284    ALOGV("%s: X", __func__);
5285    return;
5286}
5287
5288/*===========================================================================
5289 * FUNCTION   : flush
5290 *
5291 * DESCRIPTION:
5292 *
5293 * PARAMETERS :
5294 *
5295 *
5296 * RETURN     :
5297 *==========================================================================*/
5298
5299int QCamera3HardwareInterface::flush(
5300                const struct camera3_device *device)
5301{
5302    int rc;
5303    ALOGV("%s: E", __func__);
5304    QCamera3HardwareInterface *hw =
5305        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5306    if (!hw) {
5307        ALOGE("%s: NULL camera device", __func__);
5308        return -EINVAL;
5309    }
5310
5311    rc = hw->flush();
5312    ALOGV("%s: X", __func__);
5313    return rc;
5314}
5315
5316/*===========================================================================
5317 * FUNCTION   : close_camera_device
5318 *
5319 * DESCRIPTION:
5320 *
5321 * PARAMETERS :
5322 *
5323 *
5324 * RETURN     :
5325 *==========================================================================*/
5326int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
5327{
5328    ALOGV("%s: E", __func__);
5329    int ret = NO_ERROR;
5330    QCamera3HardwareInterface *hw =
5331        reinterpret_cast<QCamera3HardwareInterface *>(
5332            reinterpret_cast<camera3_device_t *>(device)->priv);
5333    if (!hw) {
5334        ALOGE("NULL camera device");
5335        return BAD_VALUE;
5336    }
5337    delete hw;
5338
5339    pthread_mutex_lock(&mCameraSessionLock);
5340    mCameraSessionActive = 0;
5341    pthread_mutex_unlock(&mCameraSessionLock);
5342    ALOGV("%s: X", __func__);
5343    return ret;
5344}
5345
5346/*===========================================================================
5347 * FUNCTION   : getWaveletDenoiseProcessPlate
5348 *
5349 * DESCRIPTION: query wavelet denoise process plate
5350 *
5351 * PARAMETERS : None
5352 *
 * RETURN     : WNR process plate value
5354 *==========================================================================*/
5355cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
5356{
5357    char prop[PROPERTY_VALUE_MAX];
5358    memset(prop, 0, sizeof(prop));
5359    property_get("persist.denoise.process.plates", prop, "0");
5360    int processPlate = atoi(prop);
5361    switch(processPlate) {
5362    case 0:
5363        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
5364    case 1:
5365        return CAM_WAVELET_DENOISE_CBCR_ONLY;
5366    case 2:
5367        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5368    case 3:
5369        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
5370    default:
5371        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5372    }
5373}
5374
5375/*===========================================================================
5376 * FUNCTION   : needRotationReprocess
5377 *
5378 * DESCRIPTION: if rotation needs to be done by reprocess in pp
5379 *
5380 * PARAMETERS : none
5381 *
5382 * RETURN     : true: needed
5383 *              false: no need
5384 *==========================================================================*/
5385bool QCamera3HardwareInterface::needRotationReprocess()
5386{
5387    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
5388        // current rotation is not zero, and pp has the capability to process rotation
5389        ALOGD("%s: need do reprocess for rotation", __func__);
5390        return true;
5391    }
5392
5393    return false;
5394}
5395
5396/*===========================================================================
5397 * FUNCTION   : needReprocess
5398 *
5399 * DESCRIPTION: if reprocess in needed
5400 *
5401 * PARAMETERS : none
5402 *
5403 * RETURN     : true: needed
5404 *              false: no need
5405 *==========================================================================*/
5406bool QCamera3HardwareInterface::needReprocess()
5407{
5408    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
5409        // TODO: add for ZSL HDR later
5410        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
5411        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
5412        return true;
5413    }
5414    return needRotationReprocess();
5415}
5416
5417/*===========================================================================
5418 * FUNCTION   : addOfflineReprocChannel
5419 *
5420 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
5421 *              coming from input channel
5422 *
5423 * PARAMETERS :
5424 *   @pInputChannel : ptr to input channel whose frames will be post-processed
5425 *
5426 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
5427 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle, metadata_buffer_t *metadata)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;
    if (pInputChannel == NULL) {
        ALOGE("%s: input channel obj is NULL", __func__);
        return NULL;
    }

    // The reprocess channel reuses the input channel's padding requirements;
    // picChHandle receives the reprocessed output.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
    if (NULL == pChannel) {
        ALOGE("%s: no mem for reprocess channel", __func__);
        return NULL;
    }

    // Channel must be initialized before streams can be added to it;
    // on any failure below the channel is deleted and NULL returned.
    rc = pChannel->initialize();
    if (rc != NO_ERROR) {
        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: build the offline post-processing feature mask from
    // the per-frame HAL metadata so only the requested features are applied.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Sharpness (edge enhancement) — enabled unless the frame requested OFF.
    if (IS_PARM_VALID(CAM_INTF_META_EDGE_MODE, metadata)) {
        cam_edge_application_t *edge = (cam_edge_application_t *)
                POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
        if (edge->edge_mode != CAM_EDGE_MODE_OFF) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
            pp_config.sharpness = edge->sharpness;
        }
    }

    // 2D wavelet denoise — plate selection comes from a system property.
    if (IS_PARM_VALID(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
        uint8_t *noise_mode = (uint8_t *)POINTER_OF(
                CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
        if (*noise_mode != CAM_NOISE_REDUCTION_MODE_OFF) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
            pp_config.denoise2d.denoise_enable = 1;
            pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
        }
    }

    // JPEG orientation → offline rotation, only when the pp module supports
    // rotation. Note: values other than 0/90/180/270 leave pp_config.rotation
    // at its memset value (0).
    if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
        int32_t *rotation = (int32_t *)POINTER_OF(
                CAM_INTF_META_JPEG_ORIENTATION, metadata);

        if (needRotationReprocess()) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
            if (*rotation == 0) {
                pp_config.rotation = ROTATE_0;
            } else if (*rotation == 90) {
                pp_config.rotation = ROTATE_90;
            } else if (*rotation == 180) {
                pp_config.rotation = ROTATE_180;
            } else if (*rotation == 270) {
                pp_config.rotation = ROTATE_270;
            }
        }
    }

    // Mirror the input channel's streams onto the reprocess channel with the
    // assembled pp config; metadata channel supplies per-frame settings.
    rc = pChannel->addReprocStreamsFromSource(pp_config,
                                             pInputChannel,
                                             mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
5503
5504}; //end namespace qcamera
5505