QCamera3HWI.cpp revision b2eca6d7c5df891d257be0d220830d1d49f120b2
1/* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34#include <cutils/properties.h>
35#include <hardware/camera3.h>
36#include <camera/CameraMetadata.h>
37#include <stdlib.h>
38#include <fcntl.h>
39#include <stdint.h>
40#include <utils/Log.h>
41#include <utils/Errors.h>
42#include <ui/Fence.h>
43#include <gralloc_priv.h>
44#include "QCamera3HWI.h"
45#include "QCamera3Mem.h"
46#include "QCamera3Channel.h"
47#include "QCamera3PostProc.h"
48
49using namespace android;
50
51namespace qcamera {
52
// Classic max macro — NOTE: evaluates arguments twice (unsafe with side effects).
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand to fetch the mapped buffer pointer at INDEX from a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables; populated outside this file (presumably during
// HAL module init) — entries are dereferenced here without NULL checks.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata handed to the framework, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: only one camera session may be open at a time
// (enforced in openCamera()).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
// Non-zero while a camera session is active; protected by mCameraSessionLock.
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
62
// Translation tables between framework (ANDROID_*) enum values and the
// backend (CAM_*) enum values. lookupHalName/lookupFwkName style helpers
// are expected to scan these arrays linearly.

// ANDROID_CONTROL_EFFECT_MODE_* -> CAM_EFFECT_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// ANDROID_CONTROL_AWB_MODE_* -> CAM_WB_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// ANDROID_CONTROL_SCENE_MODE_* -> CAM_SCENE_MODE_*.
// Note FACE_PRIORITY intentionally maps to CAM_SCENE_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// ANDROID_CONTROL_AF_MODE_* -> CAM_FOCUS_MODE_*.
// AF_MODE_OFF maps to FIXED focus (no OFF equivalent on the backend).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// ANDROID_CONTROL_AE_ANTIBANDING_MODE_* -> CAM_ANTIBANDING_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> flash behavior. Both OFF and plain ON disable flash;
// REDEYE is treated the same as AUTO flash on the backend.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// ANDROID_FLASH_MODE_* -> CAM_FLASH_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Face detect modes; SIMPLE mode is not listed (only OFF and FULL supported).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Lens focus-distance calibration quality levels.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs;
// (0, 0) means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// ANDROID_SENSOR_TEST_PATTERN_MODE_* -> CAM_TEST_PATTERN_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
/* Custom tag definitions */

// QCamera3 camera metadata sections
// Vendor tag section IDs start at VENDOR_SECTION (framework-defined base).
enum qcamera3_ext_section {
    QCAMERA3_PRIVATEDATA = VENDOR_SECTION,
    QCAMERA3_SECTIONS_END
};

// Number of vendor sections this HAL defines.
const int QCAMERA3_SECTION_COUNT = QCAMERA3_SECTIONS_END - VENDOR_SECTION;

// Each section's tag range begins at (section_id << 16), matching the
// framework's tag encoding of section in the high 16 bits.
enum qcamera3_ext_section_ranges {
    QCAMERA3_PRIVATEDATA_START = QCAMERA3_PRIVATEDATA << 16
};

// Individual vendor tags; *_END doubles as the exclusive upper bound.
enum qcamera3_ext_tags {
    QCAMERA3_PRIVATEDATA_REPROCESS = QCAMERA3_PRIVATEDATA_START,
    QCAMERA3_PRIVATEDATA_END
};

// Per-section exclusive upper bound of the tag range, indexed by
// (section - VENDOR_SECTION).
enum qcamera3_ext_tags qcamera3_ext3_section_bounds[QCAMERA3_SECTIONS_END -
    VENDOR_SECTION] = {
        QCAMERA3_PRIVATEDATA_END
} ;

// Name/type descriptor for one vendor tag.
typedef struct vendor_tag_info {
    const char *tag_name;
    uint8_t     tag_type;
} vendor_tag_info_t;

// Fully qualified section names reported to the framework.
const char *qcamera3_ext_section_names[QCAMERA3_SECTIONS_END -
        VENDOR_SECTION] = {
    "org.codeaurora.qcamera3.privatedata"
};

// Tag descriptors for the privatedata section, indexed by tag offset.
vendor_tag_info_t qcamera3_privatedata[QCAMERA3_PRIVATEDATA_END - QCAMERA3_PRIVATEDATA_START] = {
    { "privatedata", TYPE_BYTE }
};

// Per-section tag descriptor tables, indexed by (section - VENDOR_SECTION).
vendor_tag_info_t *qcamera3_tag_info[QCAMERA3_SECTIONS_END -
        VENDOR_SECTION] = {
    qcamera3_privatedata
};
207
// Function table exported to the camera3 framework (GNU designated-initializer
// syntax). register_stream_buffers is NULL — this device reports
// CAMERA_DEVICE_API_VERSION_3_2 (see constructor), where that entry point is
// unused by the framework.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};

// Maximum number of capture requests allowed in flight simultaneously.
int QCamera3HardwareInterface::kMaxInFlight = 5;
221
222/*===========================================================================
223 * FUNCTION   : QCamera3HardwareInterface
224 *
225 * DESCRIPTION: constructor of QCamera3HardwareInterface
226 *
227 * PARAMETERS :
228 *   @cameraId  : camera ID
229 *
230 * RETURN     : none
231 *==========================================================================*/
232QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
233    : mCameraId(cameraId),
234      mCameraHandle(NULL),
235      mCameraOpened(false),
236      mCameraInitialized(false),
237      mCallbackOps(NULL),
238      mInputStream(NULL),
239      mMetadataChannel(NULL),
240      mPictureChannel(NULL),
241      mRawChannel(NULL),
242      mFirstRequest(false),
243      mParamHeap(NULL),
244      mParameters(NULL),
245      mLoopBackResult(NULL),
246      mMinProcessedFrameDuration(0),
247      mMinJpegFrameDuration(0),
248      mMinRawFrameDuration(0),
249      m_pPowerModule(NULL),
250      mHdrHint(false),
251      mMetaFrameCount(0)
252{
253    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
254    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
255    mCameraDevice.common.close = close_camera_device;
256    mCameraDevice.ops = &mCameraOps;
257    mCameraDevice.priv = this;
258    gCamCapability[cameraId]->version = CAM_HAL_V3;
259    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
260    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
261    gCamCapability[cameraId]->min_num_pp_bufs = 3;
262
263    pthread_cond_init(&mRequestCond, NULL);
264    mPendingRequest = 0;
265    mCurrentRequestId = -1;
266    pthread_mutex_init(&mMutex, NULL);
267
268    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
269        mDefaultMetadata[i] = NULL;
270
271#ifdef HAS_MULTIMEDIA_HINTS
272    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
273        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
274    }
275#endif
276}
277
278/*===========================================================================
279 * FUNCTION   : ~QCamera3HardwareInterface
280 *
281 * DESCRIPTION: destructor of QCamera3HardwareInterface
282 *
283 * PARAMETERS : none
284 *
285 * RETURN     : none
286 *==========================================================================*/
287QCamera3HardwareInterface::~QCamera3HardwareInterface()
288{
289    ALOGV("%s: E", __func__);
290    /* We need to stop all streams before deleting any stream */
291
292    // NOTE: 'camera3_stream_t *' objects are already freed at
293    //        this stage by the framework
294    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
295        it != mStreamInfo.end(); it++) {
296        QCamera3Channel *channel = (*it)->channel;
297        if (channel) {
298            channel->stop();
299        }
300    }
301
302    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
303        it != mStreamInfo.end(); it++) {
304        QCamera3Channel *channel = (*it)->channel;
305        if (channel)
306            delete channel;
307        free (*it);
308    }
309
310    mPictureChannel = NULL;
311
312    /* Clean up all channels */
313    if (mCameraInitialized) {
314        if (mMetadataChannel) {
315            mMetadataChannel->stop();
316            delete mMetadataChannel;
317            mMetadataChannel = NULL;
318        }
319        deinitParameters();
320    }
321
322    if (mCameraOpened)
323        closeCamera();
324
325    mPendingBuffersMap.mPendingBufferList.clear();
326    mPendingRequestsList.clear();
327
328    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
329        if (mDefaultMetadata[i])
330            free_camera_metadata(mDefaultMetadata[i]);
331
332    pthread_cond_destroy(&mRequestCond);
333
334    pthread_mutex_destroy(&mMutex);
335    ALOGV("%s: X", __func__);
336}
337
338/*===========================================================================
339 * FUNCTION   : openCamera
340 *
341 * DESCRIPTION: open camera
342 *
343 * PARAMETERS :
344 *   @hw_device  : double ptr for camera device struct
345 *
346 * RETURN     : int32_t type of status
347 *              NO_ERROR  -- success
348 *              none-zero failure code
349 *==========================================================================*/
350int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
351{
352    int rc = 0;
353    pthread_mutex_lock(&mCameraSessionLock);
354    if (mCameraSessionActive) {
355        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
356        pthread_mutex_unlock(&mCameraSessionLock);
357        return -EUSERS;
358    }
359
360    if (mCameraOpened) {
361        *hw_device = NULL;
362        return PERMISSION_DENIED;
363    }
364
365    rc = openCamera();
366    if (rc == 0) {
367        *hw_device = &mCameraDevice.common;
368        mCameraSessionActive = 1;
369    } else
370        *hw_device = NULL;
371
372#ifdef HAS_MULTIMEDIA_HINTS
373    if (rc == 0) {
374        if (m_pPowerModule) {
375            if (m_pPowerModule->powerHint) {
376                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
377                        (void *)"state=1");
378            }
379        }
380    }
381#endif
382    pthread_mutex_unlock(&mCameraSessionLock);
383    return rc;
384}
385
386/*===========================================================================
387 * FUNCTION   : openCamera
388 *
389 * DESCRIPTION: open camera
390 *
391 * PARAMETERS : none
392 *
393 * RETURN     : int32_t type of status
394 *              NO_ERROR  -- success
395 *              none-zero failure code
396 *==========================================================================*/
397int QCamera3HardwareInterface::openCamera()
398{
399    if (mCameraHandle) {
400        ALOGE("Failure: Camera already opened");
401        return ALREADY_EXISTS;
402    }
403    mCameraHandle = camera_open(mCameraId);
404    if (!mCameraHandle) {
405        ALOGE("camera_open failed.");
406        return UNKNOWN_ERROR;
407    }
408
409    mCameraOpened = true;
410
411    return NO_ERROR;
412}
413
414/*===========================================================================
415 * FUNCTION   : closeCamera
416 *
417 * DESCRIPTION: close camera
418 *
419 * PARAMETERS : none
420 *
421 * RETURN     : int32_t type of status
422 *              NO_ERROR  -- success
423 *              none-zero failure code
424 *==========================================================================*/
425int QCamera3HardwareInterface::closeCamera()
426{
427    int rc = NO_ERROR;
428
429    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
430    mCameraHandle = NULL;
431    mCameraOpened = false;
432
433#ifdef HAS_MULTIMEDIA_HINTS
434    if (rc == NO_ERROR) {
435        if (m_pPowerModule) {
436            if (m_pPowerModule->powerHint) {
437                if(mHdrHint == true) {
438                    m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
439                            (void *)"state=3");
440                    mHdrHint = false;
441                }
442                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
443                        (void *)"state=0");
444            }
445        }
446    }
447#endif
448
449    return rc;
450}
451
452/*===========================================================================
453 * FUNCTION   : initialize
454 *
455 * DESCRIPTION: Initialize frameworks callback functions
456 *
457 * PARAMETERS :
458 *   @callback_ops : callback function to frameworks
459 *
460 * RETURN     :
461 *
462 *==========================================================================*/
463int QCamera3HardwareInterface::initialize(
464        const struct camera3_callback_ops *callback_ops)
465{
466    int rc;
467
468    pthread_mutex_lock(&mMutex);
469
470    rc = initParameters();
471    if (rc < 0) {
472        ALOGE("%s: initParamters failed %d", __func__, rc);
473       goto err1;
474    }
475    mCallbackOps = callback_ops;
476
477    pthread_mutex_unlock(&mMutex);
478    mCameraInitialized = true;
479    return 0;
480
481err1:
482    pthread_mutex_unlock(&mMutex);
483    return rc;
484}
485
486/*===========================================================================
487 * FUNCTION   : configureStreams
488 *
489 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
490 *              and output streams.
491 *
492 * PARAMETERS :
493 *   @stream_list : streams to be configured
494 *
495 * RETURN     :
496 *
497 *==========================================================================*/
498int QCamera3HardwareInterface::configureStreams(
499        camera3_stream_configuration_t *streamList)
500{
501    int rc = 0;
502
503    // Sanity check stream_list
504    if (streamList == NULL) {
505        ALOGE("%s: NULL stream configuration", __func__);
506        return BAD_VALUE;
507    }
508    if (streamList->streams == NULL) {
509        ALOGE("%s: NULL stream list", __func__);
510        return BAD_VALUE;
511    }
512
513    if (streamList->num_streams < 1) {
514        ALOGE("%s: Bad number of streams requested: %d", __func__,
515                streamList->num_streams);
516        return BAD_VALUE;
517    }
518
519    /* first invalidate all the steams in the mStreamList
520     * if they appear again, they will be validated */
521    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
522            it != mStreamInfo.end(); it++) {
523        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
524        channel->stop();
525        (*it)->status = INVALID;
526    }
527    if (mMetadataChannel) {
528        /* If content of mStreamInfo is not 0, there is metadata stream */
529        mMetadataChannel->stop();
530    }
531
532#ifdef HAS_MULTIMEDIA_HINTS
533    if(mHdrHint == true) {
534        if (m_pPowerModule) {
535            if (m_pPowerModule->powerHint) {
536                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
537                        (void *)"state=3");
538                mHdrHint = false;
539            }
540        }
541    }
542#endif
543
544    pthread_mutex_lock(&mMutex);
545
546    bool isZsl = false;
547    camera3_stream_t *inputStream = NULL;
548    camera3_stream_t *jpegStream = NULL;
549    cam_stream_size_info_t stream_config_info;
550
551    for (size_t i = 0; i < streamList->num_streams; i++) {
552        camera3_stream_t *newStream = streamList->streams[i];
553        ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
554                __func__, newStream->stream_type, newStream->format,
555                 newStream->width, newStream->height);
556        //if the stream is in the mStreamList validate it
557        bool stream_exists = false;
558        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
559                it != mStreamInfo.end(); it++) {
560            if ((*it)->stream == newStream) {
561                QCamera3Channel *channel =
562                    (QCamera3Channel*)(*it)->stream->priv;
563                stream_exists = true;
564                delete channel;
565                (*it)->status = VALID;
566                (*it)->stream->priv = NULL;
567                (*it)->channel = NULL;
568            }
569        }
570        if (!stream_exists) {
571            //new stream
572            stream_info_t* stream_info;
573            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
574            stream_info->stream = newStream;
575            stream_info->status = VALID;
576            stream_info->channel = NULL;
577            mStreamInfo.push_back(stream_info);
578        }
579        if (newStream->stream_type == CAMERA3_STREAM_INPUT
580                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
581            if (inputStream != NULL) {
582                ALOGE("%s: Multiple input streams requested!", __func__);
583                pthread_mutex_unlock(&mMutex);
584                return BAD_VALUE;
585            }
586            inputStream = newStream;
587        }
588        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
589            jpegStream = newStream;
590        }
591    }
592    mInputStream = inputStream;
593
594    /*clean up invalid streams*/
595    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
596            it != mStreamInfo.end();) {
597        if(((*it)->status) == INVALID){
598            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
599            delete channel;
600            free(*it);
601            it = mStreamInfo.erase(it);
602        } else {
603            it++;
604        }
605    }
606
607    if (mMetadataChannel) {
608        delete mMetadataChannel;
609        mMetadataChannel = NULL;
610    }
611
612    //Create metadata channel and initialize it
613    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
614                    mCameraHandle->ops, captureResultCb,
615                    &gCamCapability[mCameraId]->padding_info, this);
616    if (mMetadataChannel == NULL) {
617        ALOGE("%s: failed to allocate metadata channel", __func__);
618        rc = -ENOMEM;
619        pthread_mutex_unlock(&mMutex);
620        return rc;
621    }
622    rc = mMetadataChannel->initialize();
623    if (rc < 0) {
624        ALOGE("%s: metadata channel initialization failed", __func__);
625        delete mMetadataChannel;
626        mMetadataChannel = NULL;
627        pthread_mutex_unlock(&mMutex);
628        return rc;
629    }
630
631    /* Allocate channel objects for the requested streams */
632    for (size_t i = 0; i < streamList->num_streams; i++) {
633        camera3_stream_t *newStream = streamList->streams[i];
634        uint32_t stream_usage = newStream->usage;
635        stream_config_info.stream_sizes[i].width = newStream->width;
636        stream_config_info.stream_sizes[i].height = newStream->height;
637        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
638            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
639            //for zsl stream the size is active array size
640            isZsl = true;
641            stream_config_info.stream_sizes[i].width =
642                    gCamCapability[mCameraId]->active_array_size.width;
643            stream_config_info.stream_sizes[i].height =
644                    gCamCapability[mCameraId]->active_array_size.height;
645            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
646        } else {
647           //for non zsl streams find out the format
648           switch (newStream->format) {
649           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
650              {
651                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
652                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
653                 } else {
654                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
655                 }
656              }
657              break;
658           case HAL_PIXEL_FORMAT_YCbCr_420_888:
659              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
660#ifdef HAS_MULTIMEDIA_HINTS
661              if (m_pPowerModule) {
662                  if (m_pPowerModule->powerHint) {
663                      m_pPowerModule->powerHint(m_pPowerModule,
664                          POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
665                      mHdrHint = true;
666                  }
667              }
668#endif
669              break;
670           case HAL_PIXEL_FORMAT_BLOB:
671              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
672              break;
673           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
674           case HAL_PIXEL_FORMAT_RAW16:
675              stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
676              break;
677           default:
678              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
679              break;
680           }
681        }
682        if (newStream->priv == NULL) {
683            //New stream, construct channel
684            switch (newStream->stream_type) {
685            case CAMERA3_STREAM_INPUT:
686                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
687                break;
688            case CAMERA3_STREAM_BIDIRECTIONAL:
689                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
690                    GRALLOC_USAGE_HW_CAMERA_WRITE;
691                break;
692            case CAMERA3_STREAM_OUTPUT:
693                /* For video encoding stream, set read/write rarely
694                 * flag so that they may be set to un-cached */
695                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
696                    newStream->usage =
697                         (GRALLOC_USAGE_SW_READ_RARELY |
698                         GRALLOC_USAGE_SW_WRITE_RARELY |
699                         GRALLOC_USAGE_HW_CAMERA_WRITE);
700                else
701                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
702                break;
703            default:
704                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
705                break;
706            }
707
708            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
709                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
710                QCamera3Channel *channel = NULL;
711                switch (newStream->format) {
712                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
713                case HAL_PIXEL_FORMAT_YCbCr_420_888:
714                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
715                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
716                            mCameraHandle->ops, captureResultCb,
717                            &gCamCapability[mCameraId]->padding_info,
718                            this,
719                            newStream,
720                            (cam_stream_type_t) stream_config_info.type[i]);
721                    if (channel == NULL) {
722                        ALOGE("%s: allocation of channel failed", __func__);
723                        pthread_mutex_unlock(&mMutex);
724                        return -ENOMEM;
725                    }
726
727                    newStream->priv = channel;
728                    break;
729                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
730                case HAL_PIXEL_FORMAT_RAW16:
731                    newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
732                    mRawChannel = new QCamera3RawChannel(
733                            mCameraHandle->camera_handle,
734                            mCameraHandle->ops, captureResultCb,
735                            &gCamCapability[mCameraId]->padding_info,
736                            this, newStream, (newStream->format == HAL_PIXEL_FORMAT_RAW16));
737                    if (mRawChannel == NULL) {
738                        ALOGE("%s: allocation of raw channel failed", __func__);
739                        pthread_mutex_unlock(&mMutex);
740                        return -ENOMEM;
741                    }
742
743                    newStream->priv = (QCamera3Channel*)mRawChannel;
744                    break;
745                case HAL_PIXEL_FORMAT_BLOB:
746                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
747                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
748                            mCameraHandle->ops, captureResultCb,
749                            &gCamCapability[mCameraId]->padding_info, this, newStream);
750                    if (mPictureChannel == NULL) {
751                        ALOGE("%s: allocation of channel failed", __func__);
752                        pthread_mutex_unlock(&mMutex);
753                        return -ENOMEM;
754                    }
755                    newStream->priv = (QCamera3Channel*)mPictureChannel;
756                    break;
757
758                default:
759                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
760                    break;
761                }
762            }
763
764            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
765                    it != mStreamInfo.end(); it++) {
766                if ((*it)->stream == newStream) {
767                    (*it)->channel = (QCamera3Channel*) newStream->priv;
768                    break;
769                }
770            }
771        } else {
772            // Channel already exists for this stream
773            // Do nothing for now
774        }
775    }
776
777    if (isZsl)
778        mPictureChannel->overrideYuvSize(
779                gCamCapability[mCameraId]->active_array_size.width,
780                gCamCapability[mCameraId]->active_array_size.height);
781
782    int32_t hal_version = CAM_HAL_V3;
783    stream_config_info.num_streams = streamList->num_streams;
784
785    // settings/parameters don't carry over for new configureStreams
786    memset(mParameters, 0, sizeof(metadata_buffer_t));
787
788    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
789    AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
790                sizeof(hal_version), &hal_version);
791
792    AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
793                sizeof(stream_config_info), &stream_config_info);
794
795    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
796
    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
798    mPendingRequestsList.clear();
799    mPendingFrameDropList.clear();
800    // Initialize/Reset the pending buffers list
801    mPendingBuffersMap.num_buffers = 0;
802    mPendingBuffersMap.mPendingBufferList.clear();
803
804    mFirstRequest = true;
805
806    //Get min frame duration for this streams configuration
807    deriveMinFrameDuration();
808
809    pthread_mutex_unlock(&mMutex);
810    return rc;
811}
812
813/*===========================================================================
814 * FUNCTION   : validateCaptureRequest
815 *
816 * DESCRIPTION: validate a capture request from camera service
817 *
818 * PARAMETERS :
819 *   @request : request from framework to process
820 *
821 * RETURN     :
822 *
823 *==========================================================================*/
824int QCamera3HardwareInterface::validateCaptureRequest(
825                    camera3_capture_request_t *request)
826{
827    ssize_t idx = 0;
828    const camera3_stream_buffer_t *b;
829    CameraMetadata meta;
830
831    /* Sanity check the request */
832    if (request == NULL) {
833        ALOGE("%s: NULL capture request", __func__);
834        return BAD_VALUE;
835    }
836
837    if (request->settings == NULL && mFirstRequest) {
838        /*settings cannot be null for the first request*/
839        return BAD_VALUE;
840    }
841
842    uint32_t frameNumber = request->frame_number;
843    if (request->input_buffer != NULL &&
844            request->input_buffer->stream != mInputStream) {
845        ALOGE("%s: Request %d: Input buffer not from input stream!",
846                __FUNCTION__, frameNumber);
847        return BAD_VALUE;
848    }
849    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
850        ALOGE("%s: Request %d: No output buffers provided!",
851                __FUNCTION__, frameNumber);
852        return BAD_VALUE;
853    }
854    if (request->input_buffer != NULL) {
855        b = request->input_buffer;
856        QCamera3Channel *channel =
857            static_cast<QCamera3Channel*>(b->stream->priv);
858        if (channel == NULL) {
859            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
860                    __func__, frameNumber, idx);
861            return BAD_VALUE;
862        }
863        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
864            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
865                    __func__, frameNumber, idx);
866            return BAD_VALUE;
867        }
868        if (b->release_fence != -1) {
869            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
870                    __func__, frameNumber, idx);
871            return BAD_VALUE;
872        }
873        if (b->buffer == NULL) {
874            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
875                    __func__, frameNumber, idx);
876            return BAD_VALUE;
877        }
878    }
879
880    // Validate all buffers
881    b = request->output_buffers;
882    do {
883        QCamera3Channel *channel =
884                static_cast<QCamera3Channel*>(b->stream->priv);
885        if (channel == NULL) {
886            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
887                    __func__, frameNumber, idx);
888            return BAD_VALUE;
889        }
890        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
891            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
892                    __func__, frameNumber, idx);
893            return BAD_VALUE;
894        }
895        if (b->release_fence != -1) {
896            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
897                    __func__, frameNumber, idx);
898            return BAD_VALUE;
899        }
900        if (b->buffer == NULL) {
901            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
902                    __func__, frameNumber, idx);
903            return BAD_VALUE;
904        }
905        idx++;
906        b = request->output_buffers + idx;
907    } while (idx < (ssize_t)request->num_output_buffers);
908
909    return NO_ERROR;
910}
911
912/*===========================================================================
913 * FUNCTION   : deriveMinFrameDuration
914 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
916 *              on currently configured streams.
917 *
918 * PARAMETERS : NONE
919 *
920 * RETURN     : NONE
921 *
922 *==========================================================================*/
923void QCamera3HardwareInterface::deriveMinFrameDuration()
924{
925    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
926
927    maxJpegDim = 0;
928    maxProcessedDim = 0;
929    maxRawDim = 0;
930
931    // Figure out maximum jpeg, processed, and raw dimensions
932    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
933        it != mStreamInfo.end(); it++) {
934
935        // Input stream doesn't have valid stream_type
936        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
937            continue;
938
939        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
940        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
941            if (dimension > maxJpegDim)
942                maxJpegDim = dimension;
943        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
944                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
945            if (dimension > maxRawDim)
946                maxRawDim = dimension;
947        } else {
948            if (dimension > maxProcessedDim)
949                maxProcessedDim = dimension;
950        }
951    }
952
953    //Assume all jpeg dimensions are in processed dimensions.
954    if (maxJpegDim > maxProcessedDim)
955        maxProcessedDim = maxJpegDim;
956    //Find the smallest raw dimension that is greater or equal to jpeg dimension
957    if (maxProcessedDim > maxRawDim) {
958        maxRawDim = INT32_MAX;
959        for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
960            i++) {
961
962            int32_t dimension =
963                gCamCapability[mCameraId]->raw_dim[i].width *
964                gCamCapability[mCameraId]->raw_dim[i].height;
965
966            if (dimension >= maxProcessedDim && dimension < maxRawDim)
967                maxRawDim = dimension;
968        }
969    }
970
971    //Find minimum durations for processed, jpeg, and raw
972    for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
973            i++) {
974        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
975                gCamCapability[mCameraId]->raw_dim[i].height) {
976            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
977            break;
978        }
979    }
980    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
981        if (maxProcessedDim ==
982            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
983            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
984            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
985            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
986            break;
987        }
988    }
989}
990
991/*===========================================================================
992 * FUNCTION   : getMinFrameDuration
993 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
995 *              and current request configuration.
996 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
1000 *
1001 *==========================================================================*/
1002int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
1003{
1004    bool hasJpegStream = false;
1005    bool hasRawStream = false;
1006    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
1007        const camera3_stream_t *stream = request->output_buffers[i].stream;
1008        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
1009            hasJpegStream = true;
1010        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1011                stream->format == HAL_PIXEL_FORMAT_RAW16)
1012            hasRawStream = true;
1013    }
1014
1015    if (!hasJpegStream)
1016        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
1017    else
1018        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
1019}
1020
1021/*===========================================================================
1022 * FUNCTION   : handleMetadataWithLock
1023 *
1024 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
1025 *
1026 * PARAMETERS : @metadata_buf: metadata buffer
1027 *
1028 * RETURN     :
1029 *
1030 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf)
{
    // Process one metadata super-buffer from the backend. Caller must hold
    // mMutex. In order:
    //   1. If an "urgent" frame number is present, back-fill shutter
    //      notifications for older un-notified requests and deliver a
    //      3A-only partial result (HAL3.1 partial-result quirk).
    //   2. If a normal frame number is present, report ERROR_BUFFER for
    //      streams the backend flagged as dropped, then flush every pending
    //      request with frame_number <= this one: older entries get dummy
    //      metadata, the current entry gets the translated HAL metadata,
    //      and any output buffers already cached by handleBufferWithLock()
    //      are attached to the result.
    // The metadata buffer is returned to the channel (bufDone + free) on
    // the paths that consume it.
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
        CAM_INTF_META_PENDING_REQUESTS, metadata);
    uint32_t frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
    const struct timeval *tv = (const struct timeval *)
        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    // Convert the sensor timeval to nanoseconds for the framework.
    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
        tv->tv_usec * NSEC_PER_USEC;
    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);

    int32_t urgent_frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t urgent_frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if (urgent_frame_number_valid) {
        ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        // Received an urgent frame number: handle it using the HAL3.1
        // quirk for partial results.
        for (List<PendingRequestInfo>::iterator i =
            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // Back-fill shutter notifications for older requests that were
            // never notified, synthesizing timestamps by stepping back
            // NSEC_PER_33MSEC per frame (assumes ~30fps frame spacing —
            // TODO confirm this holds for all sensor modes).
            if (i->frame_number < urgent_frame_number &&
                i->bNotified == 0) {
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time -
                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                i->timestamp = notify_msg.message.shutter.timestamp;
                i->bNotified = 1;
                ALOGV("%s: Dummy notification !!!! notify frame_number = %d, capture_time = %lld",
                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
            }

            if (i->frame_number == urgent_frame_number) {

                camera3_capture_result_t result;

                // Send shutter notify to frameworks
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time;
                mCallbackOps->notify(mCallbackOps, &notify_msg);

                i->timestamp = capture_time;
                i->bNotified = 1;

                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate a buffer-less partial (3A) result and send it up.
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    if (!frame_number_valid) {
        // No normal frame number: the buffer served only as a start-of-frame
        // marker. Return it to the channel and bail out.
        ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        goto done_metadata;
    }
    ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
            frame_number, capture_time);

    // Go through the pending requests info and send shutter/results to frameworks
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        camera3_capture_result_t result;
        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

        // Flush out all entries with less or equal frame numbers.
        mPendingRequest--;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then notify ERROR_BUFFER for the corresponding stream and
        // buffer with CAMERA3_BUFFER_STATUS_ERROR
        if (cam_frame_drop.frame_dropped) {
            camera3_notify_msg_t notify_msg;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k=0; k<cam_frame_drop.cam_stream_ID.num_streams; k++) {
                  if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
                      // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                      ALOGV("%s: Start of reporting error frame#=%d, streamID=%d",
                             __func__, i->frame_number, streamID);
                      notify_msg.type = CAMERA3_MSG_ERROR;
                      notify_msg.message.error.frame_number = i->frame_number;
                      notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                      notify_msg.message.error.error_stream = j->stream;
                      mCallbackOps->notify(mCallbackOps, &notify_msg);
                      ALOGV("%s: End of reporting error frame#=%d, streamID=%d",
                             __func__, i->frame_number, streamID);
                      // Record the drop so the matching buffer callback can
                      // flag its buffer with CAMERA3_BUFFER_STATUS_ERROR.
                      PendingFrameDropInfo PendingFrameDrop;
                      PendingFrameDrop.frame_number=i->frame_number;
                      PendingFrameDrop.stream_ID = streamID;
                      // Add the Frame drop info to mPendingFrameDropList
                      mPendingFrameDropList.push_back(PendingFrameDrop);
                  }
                }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        if (i->frame_number < frame_number) {
            // This request's own metadata never arrived; fabricate a minimal
            // result carrying only the timestamp and request id.
            CameraMetadata dummyMetadata;
            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                    &i->timestamp, 1);
            dummyMetadata.update(ANDROID_REQUEST_ID,
                    &(i->request_id), 1);
            result.result = dummyMetadata.release();
        } else {
            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->blob_request);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }

                // For a blob request, hand a copy of the metadata to the
                // picture channel for reprocessing. The channel takes
                // ownership of reproc_meta.
                metadata_buffer_t *reproc_meta =
                        (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
                if (reproc_meta == NULL) {
                    // NOTE(review): this early exit skips the bufDone/free of
                    // metadata_buf below and leaves the current pending entry
                    // in the list — looks like a leak on the OOM path; verify.
                    ALOGE("%s: Failed to allocate memory for reproc data.", __func__);
                    goto done_metadata;
                }
                *reproc_meta = *metadata;
                mPictureChannel->queueReprocMetadata(reproc_meta);
            }
            // Return metadata buffer
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count output buffers already cached by handleBufferWithLock().
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (!result_buffers) {
                // NOTE(review): operator new[] throws on failure rather than
                // returning NULL, so this check is ineffective (and the code
                // proceeds regardless); confirm intended behavior.
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // If this buffer's stream was flagged dropped for this
                    // frame, mark it as errored and clear the drop record.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove the buffer from the pending-buffers book-keeping.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        ALOGV("%s: Found buffer %p in pending buffer List "
                              "for frame %d, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    // Copy the cached buffer into the result array and free
                    // the heap copy made by handleBufferWithLock().
                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                    __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // No cached buffers yet: send a metadata-only result.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list
        i = mPendingRequestsList.erase(i);
    }

done_metadata:
    // When the backend reports no more pending requests, wake a blocked
    // process_capture_request.
    if (!pending_requests)
        unblockRequestIfNecessary();

}
1275
1276/*===========================================================================
1277 * FUNCTION   : handleBufferWithLock
1278 *
1279 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1280 *
1281 * PARAMETERS : @buffer: image buffer for the callback
1282 *              @frame_number: frame number of the image buffer
1283 *
1284 * RETURN     :
1285 *
1286 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // Handle one completed image buffer from a stream channel. Caller must
    // hold mMutex (see function name).
    //
    // @buffer       : completed stream buffer
    // @frame_number : frame number the buffer belongs to
    //
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer until its metadata arrives
    // (handleMetadataWithLock attaches it to the final result).
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Metadata for this frame was already delivered (entry erased).
        // Verify all pending requests frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // If this stream/frame was flagged as dropped, mark the buffer as
        // errored and clear the matching drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        ALOGV("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Remove the buffer from the pending-buffers book-keeping.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                ALOGV("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        // Send a buffer-only result (no metadata) to the framework.
        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer_present) {
            // Reprocess request: deliver the buffer immediately and retire
            // the pending entry.
            camera3_capture_result result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            i = mPendingRequestsList.erase(i);
            mPendingRequest--;
            unblockRequestIfNecessary();
        } else {
            // Cache a heap copy of the buffer on the pending entry; it is
            // attached to the result and freed when metadata arrives.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        // NOTE(review): malloc result is not NULL-checked
                        // before the dereference below — confirm acceptable.
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
1371
1372/*===========================================================================
1373 * FUNCTION   : unblockRequestIfNecessary
1374 *
1375 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1376 *              that mMutex is held when this function is called.
1377 *
1378 * PARAMETERS :
1379 *
1380 * RETURN     :
1381 *
1382 *==========================================================================*/
1383void QCamera3HardwareInterface::unblockRequestIfNecessary()
1384{
1385    bool max_buffers_dequeued = false;
1386
1387    uint32_t queued_buffers = 0;
1388    for(List<stream_info_t*>::iterator it=mStreamInfo.begin();
1389        it != mStreamInfo.end(); it++) {
1390        queued_buffers = 0;
1391        for (List<PendingBufferInfo>::iterator k =
1392            mPendingBuffersMap.mPendingBufferList.begin();
1393            k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
1394            if (k->stream == (*it)->stream)
1395                queued_buffers++;
1396
1397            ALOGV("%s: Dequeued %d buffers for stream %p", __func__,
1398                queued_buffers, (*it)->stream);
1399            if (queued_buffers >=(* it)->stream->max_buffers) {
1400                ALOGV("%s: Wait!!! Max buffers Dequed", __func__);
1401                max_buffers_dequeued = true;
1402                break;
1403            }
1404        }
1405    }
1406
1407    if (!max_buffers_dequeued) {
1408        // Unblock process_capture_request
1409        pthread_cond_signal(&mRequestCond);
1410    }
1411}
1412
1413/*===========================================================================
1414 * FUNCTION   : registerStreamBuffers
1415 *
1416 * DESCRIPTION: Register buffers for a given stream with the HAL device.
1417 *
1418 * PARAMETERS :
1419 *   @stream_list : streams to be configured
1420 *
1421 * RETURN     :
1422 *
1423 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t * /*buffer_set*/)
{
    // Deprecated in the camera3 HAL: buffers are registered per-buffer in
    // processCaptureRequest() (see channel->registerBuffer there), so this
    // entry point is intentionally a no-op.
    return NO_ERROR;
}
1430
1431/*===========================================================================
1432 * FUNCTION   : processCaptureRequest
1433 *
1434 * DESCRIPTION: process a capture request from camera service
1435 *
1436 * PARAMETERS :
1437 *   @request : request from framework to process
1438 *
1439 * RETURN     :
1440 *
1441 *==========================================================================*/
1442int QCamera3HardwareInterface::processCaptureRequest(
1443                    camera3_capture_request_t *request)
1444{
1445    int rc = NO_ERROR;
1446    int32_t request_id;
1447    CameraMetadata meta;
1448
1449    pthread_mutex_lock(&mMutex);
1450
1451    rc = validateCaptureRequest(request);
1452    if (rc != NO_ERROR) {
1453        ALOGE("%s: incoming request is not valid", __func__);
1454        pthread_mutex_unlock(&mMutex);
1455        return rc;
1456    }
1457
1458    meta = request->settings;
1459
1460    // For first capture request, send capture intent, and
1461    // stream on all streams
1462    if (mFirstRequest) {
1463
1464        for (size_t i = 0; i < request->num_output_buffers; i++) {
1465            const camera3_stream_buffer_t& output = request->output_buffers[i];
1466            QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1467            rc = channel->registerBuffer(output.buffer);
1468            if (rc < 0) {
1469                ALOGE("%s: registerBuffer failed",
1470                        __func__);
1471                pthread_mutex_unlock(&mMutex);
1472                return -ENODEV;
1473            }
1474        }
1475
1476        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1477            int32_t hal_version = CAM_HAL_V3;
1478            uint8_t captureIntent =
1479                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1480
1481            memset(mParameters, 0, sizeof(metadata_buffer_t));
1482            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1483            AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1484                sizeof(hal_version), &hal_version);
1485            AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1486                sizeof(captureIntent), &captureIntent);
1487            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1488                mParameters);
1489        }
1490
1491        ALOGD("%s: Start META Channel", __func__);
1492        mMetadataChannel->start();
1493
1494        //First initialize all streams
1495        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1496            it != mStreamInfo.end(); it++) {
1497            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1498            rc = channel->initialize();
1499            if (NO_ERROR != rc) {
1500                ALOGE("%s : Channel initialization failed %d", __func__, rc);
1501                mMetadataChannel->stop();
1502                pthread_mutex_unlock(&mMutex);
1503                return rc;
1504            }
1505        }
1506        //Then start them
1507        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1508            it != mStreamInfo.end(); it++) {
1509            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1510            ALOGD("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
1511            channel->start();
1512        }
1513    }
1514
1515    uint32_t frameNumber = request->frame_number;
1516    cam_stream_ID_t streamID;
1517
1518    if (meta.exists(ANDROID_REQUEST_ID)) {
1519        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1520        mCurrentRequestId = request_id;
1521        ALOGV("%s: Received request with id: %d",__func__, request_id);
1522    } else if (mFirstRequest || mCurrentRequestId == -1){
1523        ALOGE("%s: Unable to find request id field, \
1524                & no previous id available", __func__);
1525        return NAME_NOT_FOUND;
1526    } else {
1527        ALOGV("%s: Re-using old request id", __func__);
1528        request_id = mCurrentRequestId;
1529    }
1530
1531    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1532                                    __func__, __LINE__,
1533                                    request->num_output_buffers,
1534                                    request->input_buffer,
1535                                    frameNumber);
1536    // Acquire all request buffers first
1537    streamID.num_streams = 0;
1538    int blob_request = 0;
1539    for (size_t i = 0; i < request->num_output_buffers; i++) {
1540        const camera3_stream_buffer_t& output = request->output_buffers[i];
1541        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1542        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1543
1544        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1545            //Call function to store local copy of jpeg data for encode params.
1546            blob_request = 1;
1547        }
1548
1549        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1550        if (rc != OK) {
1551            ALOGE("%s: fence wait failed %d", __func__, rc);
1552            pthread_mutex_unlock(&mMutex);
1553            return rc;
1554        }
1555
1556        streamID.streamID[streamID.num_streams] =
1557            channel->getStreamID(channel->getStreamTypeMask());
1558        streamID.num_streams++;
1559    }
1560
1561    if(request->input_buffer == NULL) {
1562       rc = setFrameParameters(request, streamID);
1563        if (rc < 0) {
1564            ALOGE("%s: fail to set frame parameters", __func__);
1565            pthread_mutex_unlock(&mMutex);
1566            return rc;
1567        }
1568    }
1569
1570    /* Update pending request list and pending buffers map */
1571    PendingRequestInfo pendingRequest;
1572    pendingRequest.frame_number = frameNumber;
1573    pendingRequest.num_buffers = request->num_output_buffers;
1574    pendingRequest.request_id = request_id;
1575    pendingRequest.blob_request = blob_request;
1576    pendingRequest.bNotified = 0;
1577    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1578
1579    for (size_t i = 0; i < request->num_output_buffers; i++) {
1580        RequestedBufferInfo requestedBuf;
1581        requestedBuf.stream = request->output_buffers[i].stream;
1582        requestedBuf.buffer = NULL;
1583        pendingRequest.buffers.push_back(requestedBuf);
1584
1585        // Add to buffer handle the pending buffers list
1586        PendingBufferInfo bufferInfo;
1587        bufferInfo.frame_number = frameNumber;
1588        bufferInfo.buffer = request->output_buffers[i].buffer;
1589        bufferInfo.stream = request->output_buffers[i].stream;
1590        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
1591        mPendingBuffersMap.num_buffers++;
1592        ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1593          __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
1594          bufferInfo.stream->format);
1595    }
1596    ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1597          __func__, mPendingBuffersMap.num_buffers);
1598    mPendingRequestsList.push_back(pendingRequest);
1599
1600    // Notify metadata channel we receive a request
1601    mMetadataChannel->request(NULL, frameNumber);
1602
1603    // Call request on other streams
1604    for (size_t i = 0; i < request->num_output_buffers; i++) {
1605        const camera3_stream_buffer_t& output = request->output_buffers[i];
1606        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1607        mm_camera_buf_def_t *pInputBuffer = NULL;
1608
1609        if (channel == NULL) {
1610            ALOGE("%s: invalid channel pointer for stream", __func__);
1611            continue;
1612        }
1613
1614        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1615            QCamera3RegularChannel* inputChannel = NULL;
1616            if(request->input_buffer != NULL){
1617
1618                //Try to get the internal format
1619                inputChannel = (QCamera3RegularChannel*)
1620                    request->input_buffer->stream->priv;
1621                if(inputChannel == NULL ){
1622                    ALOGE("%s: failed to get input channel handle", __func__);
1623                } else {
1624                    pInputBuffer =
1625                        inputChannel->getInternalFormatBuffer(
1626                                request->input_buffer->buffer);
1627                    ALOGD("%s: Input buffer dump",__func__);
1628                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1629                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1630                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1631                    ALOGD("Handle:%p", request->input_buffer->buffer);
1632                }
1633                rc = channel->request(output.buffer, frameNumber,
1634                            pInputBuffer, mParameters);
1635                if (rc < 0) {
1636                    ALOGE("%s: Fail to request on picture channel", __func__);
1637                    pthread_mutex_unlock(&mMutex);
1638                    return rc;
1639                }
1640
1641                rc = setReprocParameters(request);
1642                if (rc < 0) {
1643                    ALOGE("%s: fail to set reproc parameters", __func__);
1644                    pthread_mutex_unlock(&mMutex);
1645                    return rc;
1646                }
1647            } else
1648                rc = channel->request(output.buffer, frameNumber,
1649                            NULL, mParameters);
1650        } else {
1651            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1652                __LINE__, output.buffer, frameNumber);
1653           rc = channel->request(output.buffer, frameNumber);
1654        }
1655        if (rc < 0)
1656            ALOGE("%s: request failed", __func__);
1657    }
1658
1659    mFirstRequest = false;
1660    // Added a timed condition wait
1661    struct timespec ts;
1662    uint8_t isValidTimeout = 1;
1663    rc = clock_gettime(CLOCK_REALTIME, &ts);
1664    if (rc < 0) {
1665        isValidTimeout = 0;
1666        ALOGE("%s: Error reading the real time clock!!", __func__);
1667    }
1668    else {
1669        // Make timeout as 5 sec for request to be honored
1670        ts.tv_sec += 5;
1671    }
1672    //Block on conditional variable
1673    mPendingRequest++;
1674    while (mPendingRequest >= kMaxInFlight) {
1675        if (!isValidTimeout) {
1676            ALOGV("%s: Blocking on conditional wait", __func__);
1677            pthread_cond_wait(&mRequestCond, &mMutex);
1678        }
1679        else {
1680            ALOGV("%s: Blocking on timed conditional wait", __func__);
1681            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1682            if (rc == ETIMEDOUT) {
1683                rc = -ENODEV;
1684                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1685                break;
1686            }
1687        }
1688        ALOGV("%s: Unblocked", __func__);
1689    }
1690    pthread_mutex_unlock(&mMutex);
1691
1692    return rc;
1693}
1694
1695/*===========================================================================
1696 * FUNCTION   : dump
1697 *
1698 * DESCRIPTION:
1699 *
1700 * PARAMETERS :
1701 *
1702 *
1703 * RETURN     :
1704 *==========================================================================*/
1705void QCamera3HardwareInterface::dump(int /*fd*/)
1706{
1707    /*Enable lock when we implement this function*/
1708    /*
1709    pthread_mutex_lock(&mMutex);
1710
1711    pthread_mutex_unlock(&mMutex);
1712    */
1713    return;
1714}
1715
1716/*===========================================================================
1717 * FUNCTION   : flush
1718 *
1719 * DESCRIPTION:
1720 *
1721 * PARAMETERS :
1722 *
1723 *
1724 * RETURN     :
1725 *==========================================================================*/
1726int QCamera3HardwareInterface::flush()
1727{
1728
1729    unsigned int frameNum = 0;
1730    camera3_notify_msg_t notify_msg;
1731    camera3_capture_result_t result;
1732    camera3_stream_buffer_t pStream_Buf;
1733
1734    ALOGV("%s: Unblocking Process Capture Request", __func__);
1735
1736    // Stop the Streams/Channels
1737    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1738        it != mStreamInfo.end(); it++) {
1739        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1740        channel->stop();
1741        (*it)->status = INVALID;
1742    }
1743
1744    if (mMetadataChannel) {
1745        /* If content of mStreamInfo is not 0, there is metadata stream */
1746        mMetadataChannel->stop();
1747    }
1748
1749    // Mutex Lock
1750    pthread_mutex_lock(&mMutex);
1751
1752    // Unblock process_capture_request
1753    mPendingRequest = 0;
1754    pthread_cond_signal(&mRequestCond);
1755
1756    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1757    frameNum = i->frame_number;
1758    ALOGV("%s: Latest frame num on  mPendingRequestsList = %d",
1759      __func__, frameNum);
1760
1761    // Go through the pending buffers and send buffer errors
1762    for (List<PendingBufferInfo>::iterator k =
1763         mPendingBuffersMap.mPendingBufferList.begin();
1764         k != mPendingBuffersMap.mPendingBufferList.end();  ) {
1765         ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1766          __func__, k->frame_number, k->buffer, k->stream,
1767          k->stream->format);
1768
1769        if (k->frame_number < frameNum) {
1770            // Send Error notify to frameworks for each buffer for which
1771            // metadata buffer is already sent
1772            ALOGV("%s: Sending ERROR BUFFER for frame %d, buffer %p",
1773              __func__, k->frame_number, k->buffer);
1774
1775            notify_msg.type = CAMERA3_MSG_ERROR;
1776            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
1777            notify_msg.message.error.error_stream = k->stream;
1778            notify_msg.message.error.frame_number = k->frame_number;
1779            mCallbackOps->notify(mCallbackOps, &notify_msg);
1780            ALOGV("%s: notify frame_number = %d", __func__,
1781                    i->frame_number);
1782
1783            pStream_Buf.acquire_fence = -1;
1784            pStream_Buf.release_fence = -1;
1785            pStream_Buf.buffer = k->buffer;
1786            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1787            pStream_Buf.stream = k->stream;
1788
1789            result.result = NULL;
1790            result.frame_number = k->frame_number;
1791            result.num_output_buffers = 1;
1792            result.output_buffers = &pStream_Buf ;
1793            mCallbackOps->process_capture_result(mCallbackOps, &result);
1794
1795            mPendingBuffersMap.num_buffers--;
1796            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1797        }
1798        else {
1799          k++;
1800        }
1801    }
1802
1803    ALOGV("%s:Sending ERROR REQUEST for all pending requests", __func__);
1804
1805    // Go through the pending requests info and send error request to framework
1806    for (i = mPendingRequestsList.begin(); i != mPendingRequestsList.end(); ) {
1807        int numBuffers = 0;
1808        ALOGV("%s:Sending ERROR REQUEST for frame %d",
1809              __func__, i->frame_number);
1810
1811        // Send shutter notify to frameworks
1812        notify_msg.type = CAMERA3_MSG_ERROR;
1813        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
1814        notify_msg.message.error.error_stream = NULL;
1815        notify_msg.message.error.frame_number = i->frame_number;
1816        mCallbackOps->notify(mCallbackOps, &notify_msg);
1817
1818        result.frame_number = i->frame_number;
1819        result.num_output_buffers = 0;
1820        result.output_buffers = NULL;
1821        numBuffers = 0;
1822
1823        for (List<PendingBufferInfo>::iterator k =
1824             mPendingBuffersMap.mPendingBufferList.begin();
1825             k != mPendingBuffersMap.mPendingBufferList.end(); ) {
1826          if (k->frame_number == i->frame_number) {
1827            ALOGV("%s: Sending Error for frame = %d, buffer = %p,"
1828                   " stream = %p, stream format = %d",__func__,
1829                   k->frame_number, k->buffer, k->stream, k->stream->format);
1830
1831            pStream_Buf.acquire_fence = -1;
1832            pStream_Buf.release_fence = -1;
1833            pStream_Buf.buffer = k->buffer;
1834            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1835            pStream_Buf.stream = k->stream;
1836
1837            result.num_output_buffers = 1;
1838            result.output_buffers = &pStream_Buf;
1839            result.result = NULL;
1840            result.frame_number = i->frame_number;
1841
1842            mCallbackOps->process_capture_result(mCallbackOps, &result);
1843            mPendingBuffersMap.num_buffers--;
1844            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1845            numBuffers++;
1846          }
1847          else {
1848            k++;
1849          }
1850        }
1851        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1852              __func__, mPendingBuffersMap.num_buffers);
1853
1854        i = mPendingRequestsList.erase(i);
1855    }
1856
1857    /* Reset pending buffer list and requests list */
1858    mPendingRequestsList.clear();
1859    /* Reset pending frame Drop list and requests list */
1860    mPendingFrameDropList.clear();
1861
1862    mPendingBuffersMap.num_buffers = 0;
1863    mPendingBuffersMap.mPendingBufferList.clear();
1864    ALOGV("%s: Cleared all the pending buffers ", __func__);
1865
1866    mFirstRequest = true;
1867    pthread_mutex_unlock(&mMutex);
1868    return 0;
1869}
1870
1871/*===========================================================================
1872 * FUNCTION   : captureResultCb
1873 *
1874 * DESCRIPTION: Callback handler for all capture result
1875 *              (streams, as well as metadata)
1876 *
1877 * PARAMETERS :
1878 *   @metadata : metadata information
1879 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1880 *               NULL if metadata.
1881 *
1882 * RETURN     : NONE
1883 *==========================================================================*/
1884void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1885                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1886{
1887    pthread_mutex_lock(&mMutex);
1888
1889    /* Assume flush() is called before any reprocessing. Send
1890     * notify and result immediately upon receipt of any callback*/
1891    if (mLoopBackResult) {
1892        /* Send notify */
1893        camera3_notify_msg_t notify_msg;
1894        notify_msg.type = CAMERA3_MSG_SHUTTER;
1895        notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
1896        notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
1897        mCallbackOps->notify(mCallbackOps, &notify_msg);
1898
1899        /* Send capture result */
1900        mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
1901        free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
1902        free(mLoopBackResult);
1903        mLoopBackResult = NULL;
1904    }
1905
1906    if (metadata_buf)
1907        handleMetadataWithLock(metadata_buf);
1908    else
1909        handleBufferWithLock(buffer, frame_number);
1910
1911    pthread_mutex_unlock(&mMutex);
1912    return;
1913}
1914
1915/*===========================================================================
1916 * FUNCTION   : translateFromHalMetadata
1917 *
1918 * DESCRIPTION:
1919 *
1920 * PARAMETERS :
1921 *   @metadata : metadata information from callback
1922 *
1923 * RETURN     : camera_metadata_t*
1924 *              metadata in a format specified by fwk
1925 *==========================================================================*/
1926camera_metadata_t*
1927QCamera3HardwareInterface::translateFromHalMetadata
1928                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1929                                 int32_t request_id, int32_t blob)
1930{
1931    CameraMetadata camMetadata;
1932    camera_metadata_t* resultMetadata;
1933
1934    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1935    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1936
1937    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1938    uint8_t next_entry;
1939    while (curr_entry != CAM_INTF_PARM_MAX) {
1940       switch (curr_entry) {
1941         case CAM_INTF_META_FACE_DETECTION:{
1942             cam_face_detection_data_t *faceDetectionInfo =
1943                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1944             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1945             int32_t faceIds[MAX_ROI];
1946             uint8_t faceScores[MAX_ROI];
1947             int32_t faceRectangles[MAX_ROI * 4];
1948             int32_t faceLandmarks[MAX_ROI * 6];
1949             int j = 0, k = 0;
1950             for (int i = 0; i < numFaces; i++) {
1951                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1952                 faceScores[i] = faceDetectionInfo->faces[i].score;
1953                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1954                         faceRectangles+j, -1);
1955                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1956                 j+= 4;
1957                 k+= 6;
1958             }
1959
1960             if (numFaces <= 0) {
1961                memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
1962                memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
1963                memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
1964                memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
1965             }
1966
1967             camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1968             camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1969             camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1970               faceRectangles, numFaces*4);
1971             camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1972               faceLandmarks, numFaces*6);
1973
1974            break;
1975            }
1976         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1977             uint8_t  *color_correct_mode =
1978                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1979             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1980             break;
1981          }
1982
1983         // 3A state is sent in urgent partial result (uses quirk)
1984         case CAM_INTF_META_AEC_PRECAPTURE_ID:
1985         case CAM_INTF_META_AEC_ROI:
1986         case CAM_INTF_META_AEC_STATE:
1987         case CAM_INTF_PARM_FOCUS_MODE:
1988         case CAM_INTF_META_AF_ROI:
1989         case CAM_INTF_META_AF_STATE:
1990         case CAM_INTF_META_AF_TRIGGER_ID:
1991         case CAM_INTF_PARM_WHITE_BALANCE:
1992         case CAM_INTF_META_AWB_REGIONS:
1993         case CAM_INTF_META_AWB_STATE:
1994         case CAM_INTF_META_MODE: {
1995           ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
1996           break;
1997         }
1998
1999          case CAM_INTF_META_EDGE_MODE: {
2000             cam_edge_application_t  *edgeApplication =
2001                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
2002             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
2003             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
2004             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
2005             break;
2006          }
2007          case CAM_INTF_META_FLASH_POWER: {
2008             uint8_t  *flashPower =
2009                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
2010             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
2011             break;
2012          }
2013          case CAM_INTF_META_FLASH_FIRING_TIME: {
2014             int64_t  *flashFiringTime =
2015                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
2016             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
2017             break;
2018          }
2019          case CAM_INTF_META_FLASH_STATE: {
2020             uint8_t  *flashState =
2021                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
2022             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
2023             break;
2024          }
2025          case CAM_INTF_META_FLASH_MODE:{
2026             uint8_t *flashMode = (uint8_t*)
2027                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
2028             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
2029             break;
2030          }
2031          case CAM_INTF_META_HOTPIXEL_MODE: {
2032              uint8_t  *hotPixelMode =
2033                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
2034              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
2035              break;
2036          }
2037          case CAM_INTF_META_LENS_APERTURE:{
2038             float  *lensAperture =
2039                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
2040             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
2041             break;
2042          }
2043          case CAM_INTF_META_LENS_FILTERDENSITY: {
2044             float  *filterDensity =
2045                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
2046             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
2047             break;
2048          }
2049          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
2050             float  *focalLength =
2051                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2052             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
2053             break;
2054          }
2055          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
2056             float  *focusDistance =
2057                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
2058             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
2059             break;
2060          }
2061          case CAM_INTF_META_LENS_FOCUS_RANGE: {
2062             float  *focusRange =
2063                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
2064             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
2065             break;
2066          }
2067          case CAM_INTF_META_LENS_STATE: {
2068             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
2069             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
2070             break;
2071          }
2072          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
2073             uint8_t  *opticalStab =
2074                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
2075             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
2076             break;
2077          }
2078          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
2079             uint8_t  *noiseRedMode =
2080                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
2081             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
2082             break;
2083          }
2084          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
2085             uint8_t  *noiseRedStrength =
2086                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
2087             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
2088             break;
2089          }
2090          case CAM_INTF_META_SCALER_CROP_REGION: {
2091             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
2092             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
2093             int32_t scalerCropRegion[4];
2094             scalerCropRegion[0] = hScalerCropRegion->left;
2095             scalerCropRegion[1] = hScalerCropRegion->top;
2096             scalerCropRegion[2] = hScalerCropRegion->width;
2097             scalerCropRegion[3] = hScalerCropRegion->height;
2098             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
2099             break;
2100          }
2101          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
2102             int64_t  *sensorExpTime =
2103                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2104             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
2105             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
2106             break;
2107          }
2108          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
2109             int64_t  *sensorFameDuration =
2110                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
2111             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
2112             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
2113             break;
2114          }
2115          case CAM_INTF_META_SENSOR_SENSITIVITY:{
2116             int32_t  *sensorSensitivity =
2117                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
2118             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
2119             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
2120             break;
2121          }
2122          case CAM_INTF_META_SHADING_MODE: {
2123             uint8_t  *shadingMode =
2124                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
2125             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
2126             break;
2127          }
2128          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
2129             uint8_t  *faceDetectMode =
2130                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
2131             uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
2132                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
2133                                                        *faceDetectMode);
2134             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
2135             break;
2136          }
2137          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
2138             uint8_t  *histogramMode =
2139                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
2140             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
2141             break;
2142          }
2143          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
2144               uint8_t  *sharpnessMapMode =
2145                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
2146               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
2147                                  sharpnessMapMode, 1);
2148               break;
2149           }
2150          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
2151               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
2152               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
2153               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
2154                                  (int32_t*)sharpnessMap->sharpness,
2155                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
2156               break;
2157          }
2158          case CAM_INTF_META_LENS_SHADING_MAP: {
2159               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2160               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2161               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2162               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
2163               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2164                                  (float*)lensShadingMap->lens_shading,
2165                                  4*map_width*map_height);
2166               break;
2167          }
2168
2169          case CAM_INTF_META_TONEMAP_MODE: {
2170             uint8_t  *toneMapMode =
2171                (uint8_t *)POINTER_OF(CAM_INTF_META_TONEMAP_MODE, metadata);
2172             camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
2173             break;
2174          }
2175
2176          case CAM_INTF_META_TONEMAP_CURVES:{
2177             //Populate CAM_INTF_META_TONEMAP_CURVES
2178             /* ch0 = G, ch 1 = B, ch 2 = R*/
2179             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2180             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
2181             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
2182                                (float*)tonemap->curves[0].tonemap_points,
2183                                tonemap->tonemap_points_cnt * 2);
2184
2185             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
2186                                (float*)tonemap->curves[1].tonemap_points,
2187                                tonemap->tonemap_points_cnt * 2);
2188
2189             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
2190                                (float*)tonemap->curves[2].tonemap_points,
2191                                tonemap->tonemap_points_cnt * 2);
2192             break;
2193          }
2194          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
2195             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
2196             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
2197             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
2198             break;
2199          }
2200          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
2201              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
2202              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
2203              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
2204                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
2205              break;
2206          }
2207          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
2208             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
2209             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
2210             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
2211                       predColorCorrectionGains->gains, 4);
2212             break;
2213          }
2214          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
2215             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
2216                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2217             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2218                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2219             break;
2220
2221          }
2222          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
2223             uint8_t *blackLevelLock = (uint8_t*)
2224               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2225             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2226             break;
2227          }
2228          case CAM_INTF_META_SCENE_FLICKER:{
2229             uint8_t *sceneFlicker = (uint8_t*)
2230             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2231             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2232             break;
2233          }
2234          case CAM_INTF_PARM_LED_MODE:
2235             break;
2236          case CAM_INTF_PARM_EFFECT: {
2237             uint8_t *effectMode = (uint8_t*)
2238                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2239             uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
2240                                                    sizeof(EFFECT_MODES_MAP),
2241                                                    *effectMode);
2242             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2243             break;
2244          }
2245          case CAM_INTF_META_TEST_PATTERN_DATA: {
2246             cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
2247                 POINTER_OF(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
2248             int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
2249                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
2250                     testPatternData->mode);
2251             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
2252                     &fwk_testPatternMode, 1);
2253             break;
2254          }
2255          case CAM_INTF_META_JPEG_GPS_COORDINATES: {
2256              double *gps_coords = (double *)POINTER_OF(
2257                      CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
2258              camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
2259              break;
2260          }
2261          case CAM_INTF_META_JPEG_GPS_PROC_METHODS: {
2262              char *gps_methods = (char *)POINTER_OF(
2263                      CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
2264              String8 str(gps_methods);
2265              camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
2266              break;
2267          }
2268          case CAM_INTF_META_JPEG_GPS_TIMESTAMP: {
2269              int64_t *gps_timestamp = (int64_t *)POINTER_OF(
2270                      CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
2271              camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
2272              break;
2273          }
2274          case CAM_INTF_META_JPEG_ORIENTATION: {
2275              int32_t *jpeg_orientation = (int32_t *)POINTER_OF(
2276                      CAM_INTF_META_JPEG_ORIENTATION, metadata);
2277              camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
2278              break;
2279          }
2280          case CAM_INTF_META_JPEG_QUALITY: {
2281              uint8_t *jpeg_quality = (uint8_t *)POINTER_OF(
2282                      CAM_INTF_META_JPEG_QUALITY, metadata);
2283              camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
2284              break;
2285          }
2286          case CAM_INTF_META_JPEG_THUMB_QUALITY: {
2287              uint8_t *thumb_quality = (uint8_t *)POINTER_OF(
2288                      CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
2289              camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
2290              break;
2291          }
2292
2293          case CAM_INTF_META_JPEG_THUMB_SIZE: {
2294              cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF(
2295                      CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
2296              camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, (int32_t *)thumb_size, 2);
2297              break;
2298          }
2299
2300             break;
2301          case CAM_INTF_META_PRIVATE_DATA: {
2302             uint8_t *privateData = (uint8_t *)
2303                 POINTER_OF(CAM_INTF_META_PRIVATE_DATA, metadata);
2304             camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
2305                 privateData, MAX_METADATA_PAYLOAD_SIZE);
2306             break;
2307          }
2308          default:
2309             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2310                   __func__, curr_entry);
2311             break;
2312       }
2313       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2314       curr_entry = next_entry;
2315    }
2316
2317    int32_t hotPixelMap[2];
2318    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
2319
2320    resultMetadata = camMetadata.release();
2321    return resultMetadata;
2322}
2323
2324/*===========================================================================
2325 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
2326 *
2327 * DESCRIPTION:
2328 *
2329 * PARAMETERS :
2330 *   @metadata : metadata information from callback
2331 *
2332 * RETURN     : camera_metadata_t*
2333 *              metadata in a format specified by fwk
2334 *==========================================================================*/
2335camera_metadata_t*
2336QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
2337                                (metadata_buffer_t *metadata) {
2338
2339    CameraMetadata camMetadata;
2340    camera_metadata_t* resultMetadata;
2341
2342    uint8_t partial_result_tag = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
2343    camMetadata.update(ANDROID_QUIRKS_PARTIAL_RESULT, &partial_result_tag, 1);
2344
2345    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
2346    uint8_t next_entry;
2347    while (curr_entry != CAM_INTF_PARM_MAX) {
2348      switch (curr_entry) {
2349        case CAM_INTF_META_AEC_PRECAPTURE_ID: {
2350            int32_t  *ae_precapture_id =
2351              (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
2352            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
2353                                          ae_precapture_id, 1);
2354            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID", __func__);
2355          break;
2356        }
2357        case CAM_INTF_META_AEC_ROI: {
2358            cam_area_t  *hAeRegions =
2359                (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
2360            int32_t aeRegions[5];
2361            convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
2362            camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
2363            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_REGIONS", __func__);
2364            break;
2365        }
2366        case CAM_INTF_META_AEC_STATE:{
2367            uint8_t *ae_state =
2368                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
2369            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
2370            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
2371            break;
2372        }
2373        case CAM_INTF_PARM_FOCUS_MODE:{
2374            uint8_t  *focusMode =
2375                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
2376            uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2377               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
2378            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
2379            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
2380            break;
2381        }
2382        case CAM_INTF_META_AF_ROI:{
2383            /*af regions*/
2384            cam_area_t  *hAfRegions =
2385                (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
2386            int32_t afRegions[5];
2387            convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
2388            camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
2389            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_REGIONS", __func__);
2390            break;
2391        }
2392        case CAM_INTF_META_AF_STATE: {
2393            uint8_t  *afState =
2394               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
2395            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
2396            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
2397            break;
2398        }
2399        case CAM_INTF_META_AF_TRIGGER_ID: {
2400            int32_t  *afTriggerId =
2401                 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
2402            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
2403            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID", __func__);
2404            break;
2405        }
2406        case CAM_INTF_PARM_WHITE_BALANCE: {
2407           uint8_t  *whiteBalance =
2408                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
2409             uint8_t fwkWhiteBalanceMode =
2410                    (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2411                    sizeof(WHITE_BALANCE_MODES_MAP)/
2412                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
2413             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
2414                 &fwkWhiteBalanceMode, 1);
2415            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
2416             break;
2417        }
2418        case CAM_INTF_META_AWB_REGIONS: {
2419           /*awb regions*/
2420           cam_area_t  *hAwbRegions =
2421               (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
2422           int32_t awbRegions[5];
2423           convertToRegions(hAwbRegions->rect, awbRegions,hAwbRegions->weight);
2424           camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
2425           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_REGIONS", __func__);
2426           break;
2427        }
2428        case CAM_INTF_META_AWB_STATE: {
2429           uint8_t  *whiteBalanceState =
2430              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
2431           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
2432           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
2433           break;
2434        }
2435        case CAM_INTF_META_MODE: {
2436            uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
2437            camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
2438            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_MODE", __func__);
2439            break;
2440        }
2441        default:
2442            ALOGV("%s: Normal Metadata %d, do not process",
2443              __func__, curr_entry);
2444       }
2445       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2446       curr_entry = next_entry;
2447    }
2448    resultMetadata = camMetadata.release();
2449    return resultMetadata;
2450}
2451
2452/*===========================================================================
2453 * FUNCTION   : dumpMetadataToFile
2454 *
2455 * DESCRIPTION: Dumps tuning metadata to file system
2456 *
2457 * PARAMETERS :
2458 *   @meta           : tuning metadata
2459 *   @dumpFrameCount : current dump frame count
2460 *   @enabled        : Enable mask
2461 *
2462 *==========================================================================*/
2463void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
2464                                                   uint32_t &dumpFrameCount,
2465                                                   int32_t enabled,
2466                                                   const char *type,
2467                                                   uint32_t frameNumber)
2468{
2469    uint32_t frm_num = 0;
2470
2471    //Some sanity checks
2472    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
2473        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
2474              __func__,
2475              meta.tuning_sensor_data_size,
2476              TUNING_SENSOR_DATA_MAX);
2477        return;
2478    }
2479
2480    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
2481        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
2482              __func__,
2483              meta.tuning_vfe_data_size,
2484              TUNING_VFE_DATA_MAX);
2485        return;
2486    }
2487
2488    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
2489        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
2490              __func__,
2491              meta.tuning_cpp_data_size,
2492              TUNING_CPP_DATA_MAX);
2493        return;
2494    }
2495
2496    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
2497        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
2498              __func__,
2499              meta.tuning_cac_data_size,
2500              TUNING_CAC_DATA_MAX);
2501        return;
2502    }
2503    //
2504
2505    if(enabled){
2506        frm_num = ((enabled & 0xffff0000) >> 16);
2507        if(frm_num == 0) {
2508            frm_num = 10; //default 10 frames
2509        }
2510        if(frm_num > 256) {
2511            frm_num = 256; //256 buffers cycle around
2512        }
2513        if((frm_num == 256) && (dumpFrameCount >= frm_num)) {
2514            // reset frame count if cycling
2515            dumpFrameCount = 0;
2516        }
2517        ALOGV("DumpFrmCnt = %d, frm_num = %d",dumpFrameCount, frm_num);
2518        if (dumpFrameCount < frm_num) {
2519            char timeBuf[FILENAME_MAX];
2520            char buf[FILENAME_MAX];
2521            memset(buf, 0, sizeof(buf));
2522            memset(timeBuf, 0, sizeof(timeBuf));
2523            time_t current_time;
2524            struct tm * timeinfo;
2525            time (&current_time);
2526            timeinfo = localtime (&current_time);
2527            strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
2528            String8 filePath(timeBuf);
2529            snprintf(buf,
2530                     sizeof(buf),
2531                     "%d_HAL_META_%s_%d.bin",
2532                     dumpFrameCount,
2533                     type,
2534                     frameNumber);
2535            filePath.append(buf);
2536            int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2537            if (file_fd > 0) {
2538                int written_len = 0;
2539                meta.tuning_data_version = TUNING_DATA_VERSION;
2540                void *data = (void *)((uint8_t *)&meta.tuning_data_version);
2541                written_len += write(file_fd, data, sizeof(uint32_t));
2542                data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
2543                ALOGV("tuning_sensor_data_size %d",(int)(*(int *)data));
2544                written_len += write(file_fd, data, sizeof(uint32_t));
2545                data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
2546                ALOGV("tuning_vfe_data_size %d",(int)(*(int *)data));
2547                written_len += write(file_fd, data, sizeof(uint32_t));
2548                data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
2549                ALOGV("tuning_cpp_data_size %d",(int)(*(int *)data));
2550                written_len += write(file_fd, data, sizeof(uint32_t));
2551                data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
2552                ALOGV("tuning_cac_data_size %d",(int)(*(int *)data));
2553                written_len += write(file_fd, data, sizeof(uint32_t));
2554                int total_size = meta.tuning_sensor_data_size;
2555                data = (void *)((uint8_t *)&meta.data);
2556                written_len += write(file_fd, data, total_size);
2557                total_size = meta.tuning_vfe_data_size;
2558                data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
2559                written_len += write(file_fd, data, total_size);
2560                total_size = meta.tuning_cpp_data_size;
2561                data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
2562                written_len += write(file_fd, data, total_size);
2563                total_size = meta.tuning_cac_data_size;
2564                data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
2565                written_len += write(file_fd, data, total_size);
2566                close(file_fd);
2567            }else {
2568                ALOGE("%s: fail t open file for image dumping", __func__);
2569            }
2570            dumpFrameCount++;
2571        }
2572    }
2573}
2574
2575/*===========================================================================
2576 * FUNCTION   : convertToRegions
2577 *
2578 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2579 *
2580 * PARAMETERS :
2581 *   @rect   : cam_rect_t struct to convert
2582 *   @region : int32_t destination array
2583 *   @weight : if we are converting from cam_area_t, weight is valid
2584 *             else weight = -1
2585 *
2586 *==========================================================================*/
2587void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2588    region[0] = rect.left;
2589    region[1] = rect.top;
2590    region[2] = rect.left + rect.width;
2591    region[3] = rect.top + rect.height;
2592    if (weight > -1) {
2593        region[4] = weight;
2594    }
2595}
2596
2597/*===========================================================================
2598 * FUNCTION   : convertFromRegions
2599 *
2600 * DESCRIPTION: helper method to convert from array to cam_rect_t
2601 *
2602 * PARAMETERS :
2603 *   @rect   : cam_rect_t struct to convert
2604 *   @region : int32_t destination array
2605 *   @weight : if we are converting from cam_area_t, weight is valid
2606 *             else weight = -1
2607 *
2608 *==========================================================================*/
2609void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2610                                                   const camera_metadata_t *settings,
2611                                                   uint32_t tag){
2612    CameraMetadata frame_settings;
2613    frame_settings = settings;
2614    int32_t x_min = frame_settings.find(tag).data.i32[0];
2615    int32_t y_min = frame_settings.find(tag).data.i32[1];
2616    int32_t x_max = frame_settings.find(tag).data.i32[2];
2617    int32_t y_max = frame_settings.find(tag).data.i32[3];
2618    roi->weight = frame_settings.find(tag).data.i32[4];
2619    roi->rect.left = x_min;
2620    roi->rect.top = y_min;
2621    roi->rect.width = x_max - x_min;
2622    roi->rect.height = y_max - y_min;
2623}
2624
2625/*===========================================================================
2626 * FUNCTION   : resetIfNeededROI
2627 *
2628 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2629 *              crop region
2630 *
2631 * PARAMETERS :
2632 *   @roi       : cam_area_t struct to resize
2633 *   @scalerCropRegion : cam_crop_region_t region to compare against
2634 *
2635 *
2636 *==========================================================================*/
2637bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2638                                                 const cam_crop_region_t* scalerCropRegion)
2639{
2640    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2641    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2642    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2643    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2644    if ((roi_x_max < scalerCropRegion->left) ||
2645        (roi_y_max < scalerCropRegion->top)  ||
2646        (roi->rect.left > crop_x_max) ||
2647        (roi->rect.top > crop_y_max)){
2648        return false;
2649    }
2650    if (roi->rect.left < scalerCropRegion->left) {
2651        roi->rect.left = scalerCropRegion->left;
2652    }
2653    if (roi->rect.top < scalerCropRegion->top) {
2654        roi->rect.top = scalerCropRegion->top;
2655    }
2656    if (roi_x_max > crop_x_max) {
2657        roi_x_max = crop_x_max;
2658    }
2659    if (roi_y_max > crop_y_max) {
2660        roi_y_max = crop_y_max;
2661    }
2662    roi->rect.width = roi_x_max - roi->rect.left;
2663    roi->rect.height = roi_y_max - roi->rect.top;
2664    return true;
2665}
2666
2667/*===========================================================================
2668 * FUNCTION   : convertLandmarks
2669 *
2670 * DESCRIPTION: helper method to extract the landmarks from face detection info
2671 *
2672 * PARAMETERS :
2673 *   @face   : cam_rect_t struct to convert
2674 *   @landmarks : int32_t destination array
2675 *
2676 *
2677 *==========================================================================*/
2678void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2679{
2680    landmarks[0] = face.left_eye_center.x;
2681    landmarks[1] = face.left_eye_center.y;
2682    landmarks[2] = face.right_eye_center.x;
2683    landmarks[3] = face.right_eye_center.y;
2684    landmarks[4] = face.mouth_center.x;
2685    landmarks[5] = face.mouth_center.y;
2686}
2687
2688#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2689/*===========================================================================
2690 * FUNCTION   : initCapabilities
2691 *
2692 * DESCRIPTION: initialize camera capabilities in static data struct
2693 *
2694 * PARAMETERS :
2695 *   @cameraId  : camera Id
2696 *
2697 * RETURN     : int32_t type of status
2698 *              NO_ERROR  -- success
2699 *              none-zero failure code
2700 *==========================================================================*/
2701int QCamera3HardwareInterface::initCapabilities(int cameraId)
2702{
2703    int rc = 0;
2704    mm_camera_vtbl_t *cameraHandle = NULL;
2705    QCamera3HeapMemory *capabilityHeap = NULL;
2706
2707    cameraHandle = camera_open(cameraId);
2708    if (!cameraHandle) {
2709        ALOGE("%s: camera_open failed", __func__);
2710        rc = -1;
2711        goto open_failed;
2712    }
2713
2714    capabilityHeap = new QCamera3HeapMemory();
2715    if (capabilityHeap == NULL) {
2716        ALOGE("%s: creation of capabilityHeap failed", __func__);
2717        goto heap_creation_failed;
2718    }
2719    /* Allocate memory for capability buffer */
2720    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2721    if(rc != OK) {
2722        ALOGE("%s: No memory for cappability", __func__);
2723        goto allocate_failed;
2724    }
2725
2726    /* Map memory for capability buffer */
2727    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2728    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2729                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2730                                capabilityHeap->getFd(0),
2731                                sizeof(cam_capability_t));
2732    if(rc < 0) {
2733        ALOGE("%s: failed to map capability buffer", __func__);
2734        goto map_failed;
2735    }
2736
2737    /* Query Capability */
2738    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2739    if(rc < 0) {
2740        ALOGE("%s: failed to query capability",__func__);
2741        goto query_failed;
2742    }
2743    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2744    if (!gCamCapability[cameraId]) {
2745        ALOGE("%s: out of memory", __func__);
2746        goto query_failed;
2747    }
2748    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2749                                        sizeof(cam_capability_t));
2750    rc = 0;
2751
2752query_failed:
2753    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2754                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2755map_failed:
2756    capabilityHeap->deallocate();
2757allocate_failed:
2758    delete capabilityHeap;
2759heap_creation_failed:
2760    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2761    cameraHandle = NULL;
2762open_failed:
2763    return rc;
2764}
2765
2766/*===========================================================================
2767 * FUNCTION   : initParameters
2768 *
2769 * DESCRIPTION: initialize camera parameters
2770 *
2771 * PARAMETERS :
2772 *
2773 * RETURN     : int32_t type of status
2774 *              NO_ERROR  -- success
2775 *              none-zero failure code
2776 *==========================================================================*/
2777int QCamera3HardwareInterface::initParameters()
2778{
2779    int rc = 0;
2780
2781    //Allocate Set Param Buffer
2782    mParamHeap = new QCamera3HeapMemory();
2783    rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
2784    if(rc != OK) {
2785        rc = NO_MEMORY;
2786        ALOGE("Failed to allocate SETPARM Heap memory");
2787        delete mParamHeap;
2788        mParamHeap = NULL;
2789        return rc;
2790    }
2791
2792    //Map memory for parameters buffer
2793    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2794            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2795            mParamHeap->getFd(0),
2796            sizeof(metadata_buffer_t));
2797    if(rc < 0) {
2798        ALOGE("%s:failed to map SETPARM buffer",__func__);
2799        rc = FAILED_TRANSACTION;
2800        mParamHeap->deallocate();
2801        delete mParamHeap;
2802        mParamHeap = NULL;
2803        return rc;
2804    }
2805
2806    mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
2807    return rc;
2808}
2809
2810/*===========================================================================
2811 * FUNCTION   : deinitParameters
2812 *
2813 * DESCRIPTION: de-initialize camera parameters
2814 *
2815 * PARAMETERS :
2816 *
2817 * RETURN     : NONE
2818 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap from the camera backend first; the heap must remain valid
    // until the backend no longer references the buffer.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // Release the backing storage and the heap object itself.
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage; clear the now-dangling
    // pointer so stale accesses fail fast.
    mParameters = NULL;
}
2830
2831/*===========================================================================
2832 * FUNCTION   : calcMaxJpegSize
2833 *
2834 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2835 *
2836 * PARAMETERS :
2837 *
2838 * RETURN     : max_jpeg_size
2839 *==========================================================================*/
2840int QCamera3HardwareInterface::calcMaxJpegSize()
2841{
2842    int32_t max_jpeg_size = 0;
2843    int temp_width, temp_height;
2844    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2845        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2846        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2847        if (temp_width * temp_height > max_jpeg_size ) {
2848            max_jpeg_size = temp_width * temp_height;
2849        }
2850    }
2851    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2852    return max_jpeg_size;
2853}
2854
2855/*===========================================================================
2856 * FUNCTION   : initStaticMetadata
2857 *
2858 * DESCRIPTION: initialize the static metadata
2859 *
2860 * PARAMETERS :
2861 *   @cameraId  : camera Id
2862 *
2863 * RETURN     : int32_t type of status
2864 *              0  -- success
2865 *              non-zero failure code
2866 *==========================================================================*/
2867int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2868{
2869    int rc = 0;
2870    CameraMetadata staticInfo;
2871
2872    /* android.info: hardware level */
2873    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2874    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2875        &supportedHardwareLevel, 1);
2876
2877    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2878    /*HAL 3 only*/
2879    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2880                    &gCamCapability[cameraId]->min_focus_distance, 1);
2881
2882    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2883                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2884
2885    /*should be using focal lengths but sensor doesn't provide that info now*/
2886    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2887                      &gCamCapability[cameraId]->focal_length,
2888                      1);
2889
2890    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2891                      gCamCapability[cameraId]->apertures,
2892                      gCamCapability[cameraId]->apertures_count);
2893
2894    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2895                gCamCapability[cameraId]->filter_densities,
2896                gCamCapability[cameraId]->filter_densities_count);
2897
2898
2899    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2900                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2901                      gCamCapability[cameraId]->optical_stab_modes_count);
2902
2903    staticInfo.update(ANDROID_LENS_POSITION,
2904                      gCamCapability[cameraId]->lens_position,
2905                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2906
2907    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2908                                       gCamCapability[cameraId]->lens_shading_map_size.height};
2909    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2910                      lens_shading_map_size,
2911                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2912
2913    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2914            gCamCapability[cameraId]->sensor_physical_size, 2);
2915
2916    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2917            gCamCapability[cameraId]->exposure_time_range, 2);
2918
2919    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2920            &gCamCapability[cameraId]->max_frame_duration, 1);
2921
2922    camera_metadata_rational baseGainFactor = {
2923            gCamCapability[cameraId]->base_gain_factor.numerator,
2924            gCamCapability[cameraId]->base_gain_factor.denominator};
2925    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
2926                      &baseGainFactor, 1);
2927
2928    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2929                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2930
2931    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2932                                  gCamCapability[cameraId]->pixel_array_size.height};
2933    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2934                      pixel_array_size, 2);
2935
2936    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
2937                                                gCamCapability[cameraId]->active_array_size.top,
2938                                                gCamCapability[cameraId]->active_array_size.width,
2939                                                gCamCapability[cameraId]->active_array_size.height};
2940    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2941                      active_array_size, 4);
2942
2943    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2944            &gCamCapability[cameraId]->white_level, 1);
2945
2946    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2947            gCamCapability[cameraId]->black_level_pattern, 4);
2948
2949    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2950                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2951
2952    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2953                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2954
2955    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
2956    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2957                      (int32_t*)&maxFaces, 1);
2958
2959    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2960                      &gCamCapability[cameraId]->histogram_size, 1);
2961
2962    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2963            &gCamCapability[cameraId]->max_histogram_count, 1);
2964
2965    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2966                                    gCamCapability[cameraId]->sharpness_map_size.height};
2967
2968    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2969            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2970
2971    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2972            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2973
2974
2975    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2976                      &gCamCapability[cameraId]->raw_min_duration[0],
2977                       gCamCapability[cameraId]->supported_raw_dim_cnt);
2978
2979    int32_t scalar_formats[] = {
2980            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
2981            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
2982            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
2983            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
2984            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
2985    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2986    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2987                      scalar_formats,
2988                      scalar_formats_count);
2989
2990    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2991    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2992              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2993              available_processed_sizes);
2994    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2995                available_processed_sizes,
2996                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2997
2998    int32_t available_raw_sizes[CAM_FORMAT_MAX * 2];
2999    makeTable(gCamCapability[cameraId]->raw_dim,
3000              gCamCapability[cameraId]->supported_raw_dim_cnt,
3001              available_raw_sizes);
3002    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
3003                available_raw_sizes,
3004                gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
3005
3006    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
3007                      &gCamCapability[cameraId]->picture_min_duration[0],
3008                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
3009
3010    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
3011    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
3012                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
3013                 available_fps_ranges);
3014    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3015            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
3016
3017    camera_metadata_rational exposureCompensationStep = {
3018            gCamCapability[cameraId]->exp_compensation_step.numerator,
3019            gCamCapability[cameraId]->exp_compensation_step.denominator};
3020    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
3021                      &exposureCompensationStep, 1);
3022
3023    /*TO DO*/
3024    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
3025    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3026                      availableVstabModes, sizeof(availableVstabModes));
3027
3028    /** Quirk for urgent 3A state until final interface is worked out */
3029    uint8_t usePartialResultQuirk = 1;
3030    staticInfo.update(ANDROID_QUIRKS_USE_PARTIAL_RESULT,
3031                      &usePartialResultQuirk, 1);
3032
3033    /*HAL 1 and HAL 3 common*/
3034    float maxZoom = 4;
3035    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3036            &maxZoom, 1);
3037
3038    int32_t max3aRegions[] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
3039    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
3040            max3aRegions, 3);
3041
3042    uint8_t availableFaceDetectModes[] = {
3043            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
3044            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
3045    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3046                      availableFaceDetectModes,
3047                      sizeof(availableFaceDetectModes));
3048
3049    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
3050                                           gCamCapability[cameraId]->exposure_compensation_max};
3051    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
3052            exposureCompensationRange,
3053            sizeof(exposureCompensationRange)/sizeof(int32_t));
3054
3055    uint8_t lensFacing = (facingBack) ?
3056            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
3057    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
3058
3059    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
3060                available_processed_sizes,
3061                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
3062
3063    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3064                      available_thumbnail_sizes,
3065                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
3066
3067    /*all sizes will be clubbed into this tag*/
3068    int32_t available_stream_configs_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt *
3069                                    sizeof(scalar_formats)/sizeof(int32_t) * 4;
3070    int32_t available_stream_configs[available_stream_configs_size];
3071    int idx = 0;
3072    for (int j = 0; j < scalar_formats_count; j++) {
3073        for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3074           available_stream_configs[idx] = scalar_formats[j];
3075           available_stream_configs[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3076           available_stream_configs[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3077           available_stream_configs[idx+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
3078           idx+=4;
3079        }
3080    }
3081
3082    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3083                      available_stream_configs,
3084                      available_stream_configs_size);
3085
3086
3087
3088    int32_t max_jpeg_size = 0;
3089    int temp_width, temp_height;
3090    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3091        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3092        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3093        if (temp_width * temp_height > max_jpeg_size ) {
3094            max_jpeg_size = temp_width * temp_height;
3095        }
3096    }
3097    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
3098    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
3099                      &max_jpeg_size, 1);
3100
3101    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
3102    size_t size = 0;
3103    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
3104        int32_t val = lookupFwkName(EFFECT_MODES_MAP,
3105                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
3106                                   gCamCapability[cameraId]->supported_effects[i]);
3107        if (val != NAME_NOT_FOUND) {
3108            avail_effects[size] = (uint8_t)val;
3109            size++;
3110        }
3111    }
3112    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
3113                      avail_effects,
3114                      size);
3115
3116    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
3117    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
3118    int32_t supported_scene_modes_cnt = 0;
3119    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
3120        int32_t val = lookupFwkName(SCENE_MODES_MAP,
3121                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3122                                gCamCapability[cameraId]->supported_scene_modes[i]);
3123        if (val != NAME_NOT_FOUND) {
3124            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
3125            supported_indexes[supported_scene_modes_cnt] = i;
3126            supported_scene_modes_cnt++;
3127        }
3128    }
3129
3130    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3131                      avail_scene_modes,
3132                      supported_scene_modes_cnt);
3133
3134    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
3135    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
3136                      supported_scene_modes_cnt,
3137                      scene_mode_overrides,
3138                      supported_indexes,
3139                      cameraId);
3140    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
3141                      scene_mode_overrides,
3142                      supported_scene_modes_cnt*3);
3143
3144    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
3145    size = 0;
3146    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
3147        int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
3148                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
3149                                 gCamCapability[cameraId]->supported_antibandings[i]);
3150        if (val != NAME_NOT_FOUND) {
3151            avail_antibanding_modes[size] = (uint8_t)val;
3152            size++;
3153        }
3154
3155    }
3156    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3157                      avail_antibanding_modes,
3158                      size);
3159
3160    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
3161    size = 0;
3162    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
3163        int32_t val = lookupFwkName(FOCUS_MODES_MAP,
3164                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3165                                gCamCapability[cameraId]->supported_focus_modes[i]);
3166        if (val != NAME_NOT_FOUND) {
3167            avail_af_modes[size] = (uint8_t)val;
3168            size++;
3169        }
3170    }
3171    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
3172                      avail_af_modes,
3173                      size);
3174
3175    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
3176    size = 0;
3177    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
3178        int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
3179                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3180                                    gCamCapability[cameraId]->supported_white_balances[i]);
3181        if (val != NAME_NOT_FOUND) {
3182            avail_awb_modes[size] = (uint8_t)val;
3183            size++;
3184        }
3185    }
3186    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
3187                      avail_awb_modes,
3188                      size);
3189
3190    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
3191    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
3192      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
3193
3194    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
3195            available_flash_levels,
3196            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
3197
3198    uint8_t flashAvailable;
3199    if (gCamCapability[cameraId]->flash_available)
3200        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
3201    else
3202        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
3203    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
3204            &flashAvailable, 1);
3205
3206    uint8_t avail_ae_modes[5];
3207    size = 0;
3208    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
3209        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
3210        size++;
3211    }
3212    if (flashAvailable) {
3213        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
3214        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
3215        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
3216    }
3217    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
3218                      avail_ae_modes,
3219                      size);
3220
3221    int32_t sensitivity_range[2];
3222    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
3223    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
3224    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
3225                      sensitivity_range,
3226                      sizeof(sensitivity_range) / sizeof(int32_t));
3227
3228    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3229                      &gCamCapability[cameraId]->max_analog_sensitivity,
3230                      1);
3231
3232    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
3233                      &gCamCapability[cameraId]->picture_min_duration[0],
3234                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
3235
3236    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
3237    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
3238                      &sensor_orientation,
3239                      1);
3240
3241    int32_t max_output_streams[3] = {1, 3, 1};
3242    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
3243                      max_output_streams,
3244                      3);
3245
3246    uint8_t avail_leds = 0;
3247    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
3248                      &avail_leds, 0);
3249
3250    uint8_t focus_dist_calibrated;
3251    int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
3252            sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
3253            gCamCapability[cameraId]->focus_dist_calibrated);
3254    if (val != NAME_NOT_FOUND) {
3255        focus_dist_calibrated = (uint8_t)val;
3256        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3257                     &focus_dist_calibrated, 1);
3258    }
3259
3260    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
3261    size = 0;
3262    for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
3263            i++) {
3264        int32_t val = lookupFwkName(TEST_PATTERN_MAP,
3265                                    sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
3266                                    gCamCapability[cameraId]->supported_test_pattern_modes[i]);
3267        if (val != NAME_NOT_FOUND) {
3268            avail_testpattern_modes[size] = val;
3269            size++;
3270        }
3271    }
3272    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3273                      avail_testpattern_modes,
3274                      size);
3275
3276    uint8_t max_pipeline_depth = kMaxInFlight;
3277    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
3278                      &max_pipeline_depth,
3279                      1);
3280
3281    int32_t partial_result_count = 2;
3282    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3283                      &partial_result_count,
3284                       1);
3285
3286    uint8_t available_capabilities[] =
3287        {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
3288         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
3289         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_GCAM};
3290    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3291                      available_capabilities,
3292                      3);
3293
3294    int32_t max_input_streams = 0;
3295    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3296                      &max_input_streams,
3297                      1);
3298
3299    int32_t io_format_map[] = {};
3300    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3301                      io_format_map, 0);
3302
3303    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
3304    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
3305                      &max_latency,
3306                      1);
3307
3308    float optical_axis_angle[2];
3309    optical_axis_angle[0] = 0; //need to verify
3310    optical_axis_angle[1] = 0; //need to verify
3311    staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
3312                      optical_axis_angle,
3313                      2);
3314
3315    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
3316    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3317                      available_hot_pixel_modes,
3318                      1);
3319
3320    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
3321                                      ANDROID_EDGE_MODE_FAST};
3322    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3323                      available_edge_modes,
3324                      2);
3325
3326    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
3327                                           ANDROID_NOISE_REDUCTION_MODE_FAST};
3328    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3329                      available_noise_red_modes,
3330                      2);
3331
3332    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
3333                                         ANDROID_TONEMAP_MODE_FAST,
3334                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
3335    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3336                      available_tonemap_modes,
3337                      3);
3338
3339    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
3340    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3341                      available_hot_pixel_map_modes,
3342                      1);
3343
3344
3345    int32_t avail_min_frame_durations_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt *
3346                                                 sizeof(scalar_formats)/sizeof(int32_t) * 4;
3347    int64_t avail_min_frame_durations[avail_min_frame_durations_size];
3348    int pos = 0;
3349    for (int j = 0; j < scalar_formats_count; j++) {
3350        for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3351           avail_min_frame_durations[pos]   = scalar_formats[j];
3352           avail_min_frame_durations[pos+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3353           avail_min_frame_durations[pos+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3354           avail_min_frame_durations[pos+3] = gCamCapability[cameraId]->picture_min_duration[i];
3355           pos+=4;
3356        }
3357    }
3358    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
3359                      avail_min_frame_durations,
3360                      avail_min_frame_durations_size);
3361
3362    int32_t available_request_keys[] = {ANDROID_COLOR_CORRECTION_MODE,
3363       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
3364       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3365       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
3366       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3367       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
3368       ANDROID_CONTROL_AF_REGIONS, ANDROID_CONTROL_AF_TRIGGER,
3369       ANDROID_CONTROL_AWB_LOCK, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3370       ANDROID_CONTROL_CAPTURE_INTENT, ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
3371       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
3372       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
3373       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3374       ANDROID_JPEG_GPS_COORDINATES,
3375       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
3376       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
3377       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
3378       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3379       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
3380       ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
3381       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
3382       ANDROID_SENSOR_FRAME_DURATION,
3383       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
3384       ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
3385       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3386       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
3387       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3388       ANDROID_BLACK_LEVEL_LOCK };
3389    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
3390                      available_request_keys,
3391                      sizeof(available_request_keys)/sizeof(int32_t));
3392
3393    int32_t available_result_keys[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
3394       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
3395       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AF_REGIONS,
3396       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3397       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
3398       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3399       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
3400       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
3401       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
3402       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3403       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
3404       ANDROID_NOISE_REDUCTION_MODE, ANDROID_QUIRKS_PARTIAL_RESULT, ANDROID_REQUEST_ID,
3405       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
3406       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_FORWARD_MATRIX,
3407       ANDROID_SENSOR_COLOR_TRANSFORM, ANDROID_SENSOR_CALIBRATION_TRANSFORM,
3408       ANDROID_SENSOR_SENSITIVITY, ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
3409       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
3410       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3411       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
3412       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3413       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
3414       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
3415       ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
3416       ANDROID_STATISTICS_FACE_SCORES};
3417    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3418                      available_result_keys,
3419                      sizeof(available_result_keys)/sizeof(int32_t));
3420
3421    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3422       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3423       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
3424       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
3425       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3426       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3427       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
3428       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
3429       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3430       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
3431       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
3432       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
3433       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
3434       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
3435       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3436       ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
3437       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3438       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3439       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3440       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3441       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3442       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3443       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3444       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
3445       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
3446       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
3447       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
3448       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
3449       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
3450       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3451       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3452       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3453       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
3454       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
3455       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
3456       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3457       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3458       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3459       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3460       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3461       ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
3462    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
3463                      available_characteristics_keys,
3464                      sizeof(available_characteristics_keys)/sizeof(int32_t));
3465
3466    gStaticMetadata[cameraId] = staticInfo.release();
3467    return rc;
3468}
3469
3470/*===========================================================================
3471 * FUNCTION   : makeTable
3472 *
3473 * DESCRIPTION: make a table of sizes
3474 *
3475 * PARAMETERS :
3476 *
3477 *
3478 *==========================================================================*/
3479void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
3480                                          int32_t* sizeTable)
3481{
3482    int j = 0;
3483    for (int i = 0; i < size; i++) {
3484        sizeTable[j] = dimTable[i].width;
3485        sizeTable[j+1] = dimTable[i].height;
3486        j+=2;
3487    }
3488}
3489
3490/*===========================================================================
3491 * FUNCTION   : makeFPSTable
3492 *
3493 * DESCRIPTION: make a table of fps ranges
3494 *
3495 * PARAMETERS :
3496 *
3497 *==========================================================================*/
3498void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
3499                                          int32_t* fpsRangesTable)
3500{
3501    int j = 0;
3502    for (int i = 0; i < size; i++) {
3503        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
3504        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
3505        j+=2;
3506    }
3507}
3508
3509/*===========================================================================
3510 * FUNCTION   : makeOverridesList
3511 *
3512 * DESCRIPTION: make a list of scene mode overrides
3513 *
3514 * PARAMETERS :
3515 *
3516 *
3517 *==========================================================================*/
3518void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
3519                                                  uint8_t size, uint8_t* overridesList,
3520                                                  uint8_t* supported_indexes,
3521                                                  int camera_id)
3522{
3523    /*daemon will give a list of overrides for all scene modes.
3524      However we should send the fwk only the overrides for the scene modes
3525      supported by the framework*/
3526    int j = 0, index = 0, supt = 0;
3527    uint8_t focus_override;
3528    for (int i = 0; i < size; i++) {
3529        supt = 0;
3530        index = supported_indexes[i];
3531        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
3532        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
3533                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3534                                                    overridesTable[index].awb_mode);
3535        focus_override = (uint8_t)overridesTable[index].af_mode;
3536        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
3537           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
3538              supt = 1;
3539              break;
3540           }
3541        }
3542        if (supt) {
3543           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
3544                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3545                                              focus_override);
3546        } else {
3547           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
3548        }
3549        j+=3;
3550    }
3551}
3552
3553/*===========================================================================
3554 * FUNCTION   : getPreviewHalPixelFormat
3555 *
3556 * DESCRIPTION: convert the format to type recognized by framework
3557 *
3558 * PARAMETERS : format : the format from backend
3559 *
3560 ** RETURN    : format recognized by framework
3561 *
3562 *==========================================================================*/
3563int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
3564{
3565    int32_t halPixelFormat;
3566
3567    switch (format) {
3568    case CAM_FORMAT_YUV_420_NV12:
3569        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
3570        break;
3571    case CAM_FORMAT_YUV_420_NV21:
3572        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3573        break;
3574    case CAM_FORMAT_YUV_420_NV21_ADRENO:
3575        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
3576        break;
3577    case CAM_FORMAT_YUV_420_YV12:
3578        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
3579        break;
3580    case CAM_FORMAT_YUV_422_NV16:
3581    case CAM_FORMAT_YUV_422_NV61:
3582    default:
3583        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3584        break;
3585    }
3586    return halPixelFormat;
3587}
3588
3589/*===========================================================================
3590 * FUNCTION   : getSensorSensitivity
3591 *
3592 * DESCRIPTION: convert iso_mode to an integer value
3593 *
3594 * PARAMETERS : iso_mode : the iso_mode supported by sensor
3595 *
3596 ** RETURN    : sensitivity supported by sensor
3597 *
3598 *==========================================================================*/
3599int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
3600{
3601    int32_t sensitivity;
3602
3603    switch (iso_mode) {
3604    case CAM_ISO_MODE_100:
3605        sensitivity = 100;
3606        break;
3607    case CAM_ISO_MODE_200:
3608        sensitivity = 200;
3609        break;
3610    case CAM_ISO_MODE_400:
3611        sensitivity = 400;
3612        break;
3613    case CAM_ISO_MODE_800:
3614        sensitivity = 800;
3615        break;
3616    case CAM_ISO_MODE_1600:
3617        sensitivity = 1600;
3618        break;
3619    default:
3620        sensitivity = -1;
3621        break;
3622    }
3623    return sensitivity;
3624}
3625
3626/*===========================================================================
3627 * FUNCTION   : AddSetMetaEntryToBatch
3628 *
3629 * DESCRIPTION: add set parameter entry into batch
3630 *
3631 * PARAMETERS :
3632 *   @p_table     : ptr to parameter buffer
3633 *   @paramType   : parameter type
3634 *   @paramLength : length of parameter value
3635 *   @paramValue  : ptr to parameter value
3636 *
3637 * RETURN     : int32_t type of status
3638 *              NO_ERROR  -- success
3639 *              none-zero failure code
3640 *==========================================================================*/
3641int32_t QCamera3HardwareInterface::AddSetMetaEntryToBatch(metadata_buffer_t *p_table,
3642                                                          unsigned int paramType,
3643                                                          uint32_t paramLength,
3644                                                          void *paramValue)
3645{
3646    int position = paramType;
3647    int current, next;
3648
3649    /*************************************************************************
3650    *                 Code to take care of linking next flags                *
3651    *************************************************************************/
3652    current = GET_FIRST_PARAM_ID(p_table);
3653    if (position == current){
3654        //DO NOTHING
3655    } else if (position < current){
3656        SET_NEXT_PARAM_ID(position, p_table, current);
3657        SET_FIRST_PARAM_ID(p_table, position);
3658    } else {
3659        /* Search for the position in the linked list where we need to slot in*/
3660        while (position > GET_NEXT_PARAM_ID(current, p_table))
3661            current = GET_NEXT_PARAM_ID(current, p_table);
3662
3663        /*If node already exists no need to alter linking*/
3664        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
3665            next = GET_NEXT_PARAM_ID(current, p_table);
3666            SET_NEXT_PARAM_ID(current, p_table, position);
3667            SET_NEXT_PARAM_ID(position, p_table, next);
3668        }
3669    }
3670
3671    /*************************************************************************
3672    *                   Copy contents into entry                             *
3673    *************************************************************************/
3674
3675    if (paramLength > sizeof(parm_type_t)) {
3676        ALOGE("%s:Size of input larger than max entry size",__func__);
3677        return BAD_VALUE;
3678    }
3679    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
3680    SET_PARM_VALID_BIT(paramType,p_table,1);
3681    return NO_ERROR;
3682}
3683
3684/*===========================================================================
3685 * FUNCTION   : lookupFwkName
3686 *
3687 * DESCRIPTION: In case the enum is not same in fwk and backend
3688 *              make sure the parameter is correctly propogated
3689 *
3690 * PARAMETERS  :
3691 *   @arr      : map between the two enums
3692 *   @len      : len of the map
3693 *   @hal_name : name of the hal_parm to map
3694 *
3695 * RETURN     : int type of status
3696 *              fwk_name  -- success
3697 *              none-zero failure code
3698 *==========================================================================*/
3699int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
3700                                             int len, int hal_name)
3701{
3702
3703    for (int i = 0; i < len; i++) {
3704        if (arr[i].hal_name == hal_name)
3705            return arr[i].fwk_name;
3706    }
3707
3708    /* Not able to find matching framework type is not necessarily
3709     * an error case. This happens when mm-camera supports more attributes
3710     * than the frameworks do */
3711    ALOGD("%s: Cannot find matching framework type", __func__);
3712    return NAME_NOT_FOUND;
3713}
3714
3715/*===========================================================================
3716 * FUNCTION   : lookupHalName
3717 *
3718 * DESCRIPTION: In case the enum is not same in fwk and backend
3719 *              make sure the parameter is correctly propogated
3720 *
3721 * PARAMETERS  :
3722 *   @arr      : map between the two enums
3723 *   @len      : len of the map
3724 *   @fwk_name : name of the hal_parm to map
3725 *
3726 * RETURN     : int32_t type of status
3727 *              hal_name  -- success
3728 *              none-zero failure code
3729 *==========================================================================*/
3730int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
3731                                             int len, unsigned int fwk_name)
3732{
3733    for (int i = 0; i < len; i++) {
3734       if (arr[i].fwk_name == fwk_name)
3735           return arr[i].hal_name;
3736    }
3737    ALOGE("%s: Cannot find matching hal type", __func__);
3738    return NAME_NOT_FOUND;
3739}
3740
3741/*===========================================================================
3742 * FUNCTION   : getCapabilities
3743 *
3744 * DESCRIPTION: query camera capabilities
3745 *
3746 * PARAMETERS :
3747 *   @cameraId  : camera Id
3748 *   @info      : camera info struct to be filled in with camera capabilities
3749 *
3750 * RETURN     : int32_t type of status
3751 *              NO_ERROR  -- success
3752 *              none-zero failure code
3753 *==========================================================================*/
3754int QCamera3HardwareInterface::getCamInfo(int cameraId,
3755                                    struct camera_info *info)
3756{
3757    int rc = 0;
3758
3759    if (NULL == gCamCapability[cameraId]) {
3760        rc = initCapabilities(cameraId);
3761        if (rc < 0) {
3762            //pthread_mutex_unlock(&g_camlock);
3763            return rc;
3764        }
3765    }
3766
3767    if (NULL == gStaticMetadata[cameraId]) {
3768        rc = initStaticMetadata(cameraId);
3769        if (rc < 0) {
3770            return rc;
3771        }
3772    }
3773
3774    switch(gCamCapability[cameraId]->position) {
3775    case CAM_POSITION_BACK:
3776        info->facing = CAMERA_FACING_BACK;
3777        break;
3778
3779    case CAM_POSITION_FRONT:
3780        info->facing = CAMERA_FACING_FRONT;
3781        break;
3782
3783    default:
3784        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
3785        rc = -1;
3786        break;
3787    }
3788
3789
3790    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
3791    info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
3792    info->static_camera_characteristics = gStaticMetadata[cameraId];
3793
3794    return rc;
3795}
3796
3797/*===========================================================================
3798 * FUNCTION   : translateCapabilityToMetadata
3799 *
3800 * DESCRIPTION: translate the capability into camera_metadata_t
3801 *
3802 * PARAMETERS : type of the request
3803 *
3804 *
3805 * RETURN     : success: camera_metadata_t*
3806 *              failure: NULL
3807 *
3808 *==========================================================================*/
3809camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
3810{
3811    pthread_mutex_lock(&mMutex);
3812
3813    if (mDefaultMetadata[type] != NULL) {
3814        pthread_mutex_unlock(&mMutex);
3815        return mDefaultMetadata[type];
3816    }
3817    //first time we are handling this request
3818    //fill up the metadata structure using the wrapper class
3819    CameraMetadata settings;
3820    //translate from cam_capability_t to camera_metadata_tag_t
3821    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
3822    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
3823    int32_t defaultRequestID = 0;
3824    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
3825
3826    uint8_t controlIntent = 0;
3827    uint8_t focusMode;
3828    switch (type) {
3829      case CAMERA3_TEMPLATE_PREVIEW:
3830        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
3831        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3832        break;
3833      case CAMERA3_TEMPLATE_STILL_CAPTURE:
3834        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
3835        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3836        break;
3837      case CAMERA3_TEMPLATE_VIDEO_RECORD:
3838        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
3839        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
3840        break;
3841      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
3842        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
3843        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
3844        break;
3845      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
3846        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
3847        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3848        break;
3849      default:
3850        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
3851        break;
3852    }
3853    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
3854
3855    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
3856        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3857    }
3858    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
3859
3860    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3861            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
3862
3863    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
3864    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3865
3866    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
3867    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
3868
3869    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
3870    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
3871
3872    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
3873    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
3874
3875    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
3876    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
3877
3878    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
3879    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
3880
3881    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
3882    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
3883
3884    /*flash*/
3885    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3886    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3887
3888    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3889    settings.update(ANDROID_FLASH_FIRING_POWER,
3890            &flashFiringLevel, 1);
3891
3892    /* lens */
3893    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3894    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3895
3896    if (gCamCapability[mCameraId]->filter_densities_count) {
3897        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3898        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3899                        gCamCapability[mCameraId]->filter_densities_count);
3900    }
3901
3902    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3903    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3904
3905    /* Exposure time(Update the Min Exposure Time)*/
3906    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
3907    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
3908
3909    /* frame duration */
3910    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
3911    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
3912
3913    /* sensitivity */
3914    static const int32_t default_sensitivity = 100;
3915    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
3916
3917    /*edge mode*/
3918    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
3919    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
3920
3921    /*noise reduction mode*/
3922    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3923    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3924
3925    /*color correction mode*/
3926    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3927    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3928
3929    /*transform matrix mode*/
3930    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3931    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3932
3933    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3934    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3935
3936    int32_t scaler_crop_region[4];
3937    scaler_crop_region[0] = 0;
3938    scaler_crop_region[1] = 0;
3939    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
3940    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
3941    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
3942
3943    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
3944    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
3945
3946    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3947    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
3948
3949    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
3950                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
3951                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
3952    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
3953
3954    /*focus distance*/
3955    float focus_distance = 0.0;
3956    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
3957
3958    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
3959    float max_range = 0.0;
3960    float max_fixed_fps = 0.0;
3961    int32_t fps_range[2] = {0, 0};
3962    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
3963            i++) {
3964        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
3965            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
3966        if (type == CAMERA3_TEMPLATE_PREVIEW ||
3967                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
3968                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
3969            if (range > max_range) {
3970                fps_range[0] =
3971                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
3972                fps_range[1] =
3973                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
3974                max_range = range;
3975            }
3976        } else {
3977            if (range < 0.01 && max_fixed_fps <
3978                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
3979                fps_range[0] =
3980                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
3981                fps_range[1] =
3982                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
3983                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
3984            }
3985        }
3986    }
3987    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
3988
3989    /*precapture trigger*/
3990    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
3991    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
3992
3993    /*af trigger*/
3994    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
3995    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
3996
3997    /* ae & af regions */
3998    int32_t active_region[] = {
3999            gCamCapability[mCameraId]->active_array_size.left,
4000            gCamCapability[mCameraId]->active_array_size.top,
4001            gCamCapability[mCameraId]->active_array_size.left +
4002                    gCamCapability[mCameraId]->active_array_size.width,
4003            gCamCapability[mCameraId]->active_array_size.top +
4004                    gCamCapability[mCameraId]->active_array_size.height,
4005            1};
4006    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
4007    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
4008
4009    /* black level lock */
4010    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4011    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
4012
4013    /* face detect mode */
4014    uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
4015    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
4016
4017    /* lens shading map mode */
4018    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
4019    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
4020
4021    mDefaultMetadata[type] = settings.release();
4022
4023    pthread_mutex_unlock(&mMutex);
4024    return mDefaultMetadata[type];
4025}
4026
4027/*===========================================================================
4028 * FUNCTION   : setFrameParameters
4029 *
4030 * DESCRIPTION: set parameters per frame as requested in the metadata from
4031 *              framework
4032 *
4033 * PARAMETERS :
4034 *   @request   : request that needs to be serviced
4035 *   @streamID : Stream ID of all the requested streams
4036 *
4037 * RETURN     : success: NO_ERROR
4038 *              failure:
4039 *==========================================================================*/
4040int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
4041                    cam_stream_ID_t streamID)
4042{
4043    /*translate from camera_metadata_t type to parm_type_t*/
4044    int rc = 0;
4045    int32_t hal_version = CAM_HAL_V3;
4046
4047    memset(mParameters, 0, sizeof(metadata_buffer_t));
4048    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
4049    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
4050                sizeof(hal_version), &hal_version);
4051    if (rc < 0) {
4052        ALOGE("%s: Failed to set hal version in the parameters", __func__);
4053        return BAD_VALUE;
4054    }
4055
4056    /*we need to update the frame number in the parameters*/
4057    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
4058                                sizeof(request->frame_number), &(request->frame_number));
4059    if (rc < 0) {
4060        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4061        return BAD_VALUE;
4062    }
4063
4064    /* Update stream id of all the requested buffers */
4065    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
4066                                sizeof(cam_stream_ID_t), &streamID);
4067
4068    if (rc < 0) {
4069        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
4070        return BAD_VALUE;
4071    }
4072
4073    if(request->settings != NULL){
4074        rc = translateToHalMetadata(request, mParameters);
4075    }
4076
4077    /*set the parameters to backend*/
4078    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
4079    return rc;
4080}
4081
4082/*===========================================================================
4083 * FUNCTION   : setReprocParameters
4084 *
4085 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
4086 *              queue it to picture channel for reprocessing.
4087 *
4088 * PARAMETERS :
4089 *   @request   : request that needs to be serviced
4090 *
4091 * RETURN     : success: NO_ERROR
4092 *              failure: non zero failure code
4093 *==========================================================================*/
4094int QCamera3HardwareInterface::setReprocParameters(camera3_capture_request_t *request)
4095{
4096    /*translate from camera_metadata_t type to parm_type_t*/
4097    int rc = 0;
4098    metadata_buffer_t *reprocParam = NULL;
4099
4100    if(request->settings != NULL){
4101        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
4102        return BAD_VALUE;
4103    }
4104    reprocParam = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
4105    if (!reprocParam) {
4106        ALOGE("%s: Failed to allocate reprocessing metadata buffer", __func__);
4107        return NO_MEMORY;
4108    }
4109    memset(reprocParam, 0, sizeof(metadata_buffer_t));
4110    reprocParam->first_flagged_entry = CAM_INTF_PARM_MAX;
4111
4112    /*we need to update the frame number in the parameters*/
4113    rc = AddSetMetaEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
4114                                sizeof(request->frame_number), &(request->frame_number));
4115    if (rc < 0) {
4116        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4117        return BAD_VALUE;
4118    }
4119
4120
4121    rc = translateToHalMetadata(request, reprocParam);
4122    if (rc < 0) {
4123        ALOGE("%s: Failed to translate reproc request", __func__);
4124        delete reprocParam;
4125        return rc;
4126    }
4127    /*queue metadata for reprocessing*/
4128    rc = mPictureChannel->queueReprocMetadata(reprocParam);
4129    if (rc < 0) {
4130        ALOGE("%s: Failed to queue reprocessing metadata", __func__);
4131        delete reprocParam;
4132    }
4133
4134    return rc;
4135}
4136
4137/*===========================================================================
4138 * FUNCTION   : translateToHalMetadata
4139 *
4140 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
4141 *
4142 *
4143 * PARAMETERS :
4144 *   @request  : request sent from framework
4145 *
4146 *
4147 * RETURN     : success: NO_ERROR
4148 *              failure:
4149 *==========================================================================*/
4150int QCamera3HardwareInterface::translateToHalMetadata
4151                                  (const camera3_capture_request_t *request,
4152                                   metadata_buffer_t *hal_metadata)
4153{
4154    int rc = 0;
4155    CameraMetadata frame_settings;
4156    frame_settings = request->settings;
4157
4158    /* Do not change the order of the following list unless you know what you are
4159     * doing.
4160     * The order is laid out in such a way that parameters in the front of the table
4161     * may be used to override the parameters later in the table. Examples are:
4162     * 1. META_MODE should precede AEC/AWB/AF MODE
4163     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
4164     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
4165     * 4. Any mode should precede it's corresponding settings
4166     */
4167    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4168        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4169        rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_MODE,
4170                sizeof(metaMode), &metaMode);
4171        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4172           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4173           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
4174                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
4175                                             fwk_sceneMode);
4176           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4177                sizeof(sceneMode), &sceneMode);
4178        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
4179           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4180           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4181                sizeof(sceneMode), &sceneMode);
4182        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
4183           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4184           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4185                sizeof(sceneMode), &sceneMode);
4186        }
4187    }
4188
4189    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4190        uint8_t fwk_aeMode =
4191            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4192        uint8_t aeMode;
4193        int32_t redeye;
4194
4195        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
4196            aeMode = CAM_AE_MODE_OFF;
4197        } else {
4198            aeMode = CAM_AE_MODE_ON;
4199        }
4200        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
4201            redeye = 1;
4202        } else {
4203            redeye = 0;
4204        }
4205
4206        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
4207                                          sizeof(AE_FLASH_MODE_MAP),
4208                                          fwk_aeMode);
4209        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
4210                sizeof(aeMode), &aeMode);
4211        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
4212                sizeof(flashMode), &flashMode);
4213        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
4214                sizeof(redeye), &redeye);
4215    }
4216
4217    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
4218        uint8_t fwk_whiteLevel =
4219            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
4220        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
4221                sizeof(WHITE_BALANCE_MODES_MAP),
4222                fwk_whiteLevel);
4223        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
4224                sizeof(whiteLevel), &whiteLevel);
4225    }
4226
4227    float focalDistance = -1.0;
4228    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
4229        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
4230        rc = AddSetMetaEntryToBatch(hal_metadata,
4231                CAM_INTF_META_LENS_FOCUS_DISTANCE,
4232                sizeof(focalDistance), &focalDistance);
4233    }
4234
4235    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
4236        uint8_t fwk_focusMode =
4237            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
4238        uint8_t focusMode;
4239        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
4240            focusMode = CAM_FOCUS_MODE_INFINITY;
4241        } else{
4242         focusMode = lookupHalName(FOCUS_MODES_MAP,
4243                                   sizeof(FOCUS_MODES_MAP),
4244                                   fwk_focusMode);
4245        }
4246        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
4247                sizeof(focusMode), &focusMode);
4248    }
4249
4250    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
4251        int32_t antibandingMode =
4252            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
4253        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
4254                sizeof(antibandingMode), &antibandingMode);
4255    }
4256
4257    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4258        int32_t expCompensation = frame_settings.find(
4259            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4260        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4261            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4262        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4263            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4264        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
4265          sizeof(expCompensation), &expCompensation);
4266    }
4267
4268    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4269        int32_t expCompensation = frame_settings.find(
4270            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4271        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4272            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4273        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4274            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4275        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
4276          sizeof(expCompensation), &expCompensation);
4277    }
4278
4279    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
4280        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
4281        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
4282                sizeof(aeLock), &aeLock);
4283    }
4284    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4285        cam_fps_range_t fps_range;
4286        fps_range.min_fps =
4287            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
4288        fps_range.max_fps =
4289            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
4290        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
4291                sizeof(fps_range), &fps_range);
4292    }
4293
4294    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
4295        uint8_t awbLock =
4296            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
4297        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
4298                sizeof(awbLock), &awbLock);
4299    }
4300
4301    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
4302        uint8_t fwk_effectMode =
4303            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
4304        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
4305                sizeof(EFFECT_MODES_MAP),
4306                fwk_effectMode);
4307        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
4308                sizeof(effectMode), &effectMode);
4309    }
4310
4311    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
4312        uint8_t colorCorrectMode =
4313            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
4314        rc =
4315            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
4316                    sizeof(colorCorrectMode), &colorCorrectMode);
4317    }
4318
4319    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
4320        cam_color_correct_gains_t colorCorrectGains;
4321        for (int i = 0; i < 4; i++) {
4322            colorCorrectGains.gains[i] =
4323                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
4324        }
4325        rc =
4326            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
4327                    sizeof(colorCorrectGains), &colorCorrectGains);
4328    }
4329
4330    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
4331        cam_color_correct_matrix_t colorCorrectTransform;
4332        cam_rational_type_t transform_elem;
4333        int num = 0;
4334        for (int i = 0; i < 3; i++) {
4335           for (int j = 0; j < 3; j++) {
4336              transform_elem.numerator =
4337                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
4338              transform_elem.denominator =
4339                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
4340              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
4341              num++;
4342           }
4343        }
4344        rc =
4345            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
4346                    sizeof(colorCorrectTransform), &colorCorrectTransform);
4347    }
4348
4349    cam_trigger_t aecTrigger;
4350    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
4351    aecTrigger.trigger_id = -1;
4352    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
4353        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
4354        aecTrigger.trigger =
4355            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
4356        aecTrigger.trigger_id =
4357            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
4358        rc = AddSetMetaEntryToBatch(hal_metadata,
4359                CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
4360                sizeof(aecTrigger), &aecTrigger);
4361    }
4362    /*af_trigger must come with a trigger id*/
4363    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
4364        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
4365        cam_trigger_t af_trigger;
4366        af_trigger.trigger =
4367            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
4368        af_trigger.trigger_id =
4369            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
4370        rc = AddSetMetaEntryToBatch(hal_metadata,
4371                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
4372    }
4373
4374    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
4375        int32_t demosaic =
4376            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
4377        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
4378                sizeof(demosaic), &demosaic);
4379    }
4380
4381    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
4382        cam_edge_application_t edge_application;
4383        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
4384        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
4385            edge_application.sharpness = 0;
4386        } else {
4387            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
4388                uint8_t edgeStrength =
4389                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
4390                edge_application.sharpness = (int32_t)edgeStrength;
4391            } else {
4392                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
4393            }
4394        }
4395        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
4396                sizeof(edge_application), &edge_application);
4397    }
4398
4399    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
4400        int32_t respectFlashMode = 1;
4401        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4402            uint8_t fwk_aeMode =
4403                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4404            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
4405                respectFlashMode = 0;
4406                ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
4407                    __func__);
4408            }
4409        }
4410        if (respectFlashMode) {
4411            uint8_t flashMode =
4412                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
4413            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
4414                                          sizeof(FLASH_MODES_MAP),
4415                                          flashMode);
4416            ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
4417            // To check: CAM_INTF_META_FLASH_MODE usage
4418            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
4419                          sizeof(flashMode), &flashMode);
4420        }
4421    }
4422
4423    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
4424        uint8_t flashPower =
4425            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
4426        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
4427                sizeof(flashPower), &flashPower);
4428    }
4429
4430    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
4431        int64_t flashFiringTime =
4432            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
4433        rc = AddSetMetaEntryToBatch(hal_metadata,
4434                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
4435    }
4436
4437    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
4438        uint8_t hotPixelMode =
4439            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
4440        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
4441                sizeof(hotPixelMode), &hotPixelMode);
4442    }
4443
4444    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
4445        float lensAperture =
4446            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
4447        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
4448                sizeof(lensAperture), &lensAperture);
4449    }
4450
4451    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
4452        float filterDensity =
4453            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
4454        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
4455                sizeof(filterDensity), &filterDensity);
4456    }
4457
4458    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
4459        float focalLength =
4460            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
4461        rc = AddSetMetaEntryToBatch(hal_metadata,
4462                CAM_INTF_META_LENS_FOCAL_LENGTH,
4463                sizeof(focalLength), &focalLength);
4464    }
4465
4466    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
4467        uint8_t optStabMode =
4468            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
4469        rc = AddSetMetaEntryToBatch(hal_metadata,
4470                CAM_INTF_META_LENS_OPT_STAB_MODE,
4471                sizeof(optStabMode), &optStabMode);
4472    }
4473
4474    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4475        uint8_t noiseRedMode =
4476            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4477        rc = AddSetMetaEntryToBatch(hal_metadata,
4478                CAM_INTF_META_NOISE_REDUCTION_MODE,
4479                sizeof(noiseRedMode), &noiseRedMode);
4480    }
4481
4482    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
4483        uint8_t noiseRedStrength =
4484            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
4485        rc = AddSetMetaEntryToBatch(hal_metadata,
4486                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
4487                sizeof(noiseRedStrength), &noiseRedStrength);
4488    }
4489
4490    cam_crop_region_t scalerCropRegion;
4491    bool scalerCropSet = false;
4492    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
4493        scalerCropRegion.left =
4494            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
4495        scalerCropRegion.top =
4496            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
4497        scalerCropRegion.width =
4498            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
4499        scalerCropRegion.height =
4500            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
4501        rc = AddSetMetaEntryToBatch(hal_metadata,
4502                CAM_INTF_META_SCALER_CROP_REGION,
4503                sizeof(scalerCropRegion), &scalerCropRegion);
4504        scalerCropSet = true;
4505    }
4506
4507    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
4508        int64_t sensorExpTime =
4509            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
4510        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
4511        rc = AddSetMetaEntryToBatch(hal_metadata,
4512                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
4513                sizeof(sensorExpTime), &sensorExpTime);
4514    }
4515
4516    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
4517        int64_t sensorFrameDuration =
4518            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
4519        int64_t minFrameDuration = getMinFrameDuration(request);
4520        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
4521        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
4522            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
4523        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
4524        rc = AddSetMetaEntryToBatch(hal_metadata,
4525                CAM_INTF_META_SENSOR_FRAME_DURATION,
4526                sizeof(sensorFrameDuration), &sensorFrameDuration);
4527    }
4528
4529    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
4530        int32_t sensorSensitivity =
4531            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
4532        if (sensorSensitivity <
4533                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
4534            sensorSensitivity =
4535                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
4536        if (sensorSensitivity >
4537                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
4538            sensorSensitivity =
4539                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
4540        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
4541        rc = AddSetMetaEntryToBatch(hal_metadata,
4542                CAM_INTF_META_SENSOR_SENSITIVITY,
4543                sizeof(sensorSensitivity), &sensorSensitivity);
4544    }
4545
4546    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
4547        int32_t shadingMode =
4548            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
4549        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
4550                sizeof(shadingMode), &shadingMode);
4551    }
4552
4553    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
4554        uint8_t shadingStrength =
4555            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
4556        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
4557                sizeof(shadingStrength), &shadingStrength);
4558    }
4559
4560    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
4561        uint8_t fwk_facedetectMode =
4562            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
4563        uint8_t facedetectMode =
4564            lookupHalName(FACEDETECT_MODES_MAP,
4565                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
4566        rc = AddSetMetaEntryToBatch(hal_metadata,
4567                CAM_INTF_META_STATS_FACEDETECT_MODE,
4568                sizeof(facedetectMode), &facedetectMode);
4569    }
4570
4571    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
4572        uint8_t histogramMode =
4573            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
4574        rc = AddSetMetaEntryToBatch(hal_metadata,
4575                CAM_INTF_META_STATS_HISTOGRAM_MODE,
4576                sizeof(histogramMode), &histogramMode);
4577    }
4578
4579    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
4580        uint8_t sharpnessMapMode =
4581            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
4582        rc = AddSetMetaEntryToBatch(hal_metadata,
4583                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
4584                sizeof(sharpnessMapMode), &sharpnessMapMode);
4585    }
4586
4587    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
4588        uint8_t tonemapMode =
4589            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
4590        rc = AddSetMetaEntryToBatch(hal_metadata,
4591                CAM_INTF_META_TONEMAP_MODE,
4592                sizeof(tonemapMode), &tonemapMode);
4593    }
4594    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
4595    /*All tonemap channels will have the same number of points*/
4596    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
4597        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
4598        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
4599        cam_rgb_tonemap_curves tonemapCurves;
4600        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
4601
4602        /* ch0 = G*/
4603        int point = 0;
4604        cam_tonemap_curve_t tonemapCurveGreen;
4605        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
4606            for (int j = 0; j < 2; j++) {
4607               tonemapCurveGreen.tonemap_points[i][j] =
4608                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
4609               point++;
4610            }
4611        }
4612        tonemapCurves.curves[0] = tonemapCurveGreen;
4613
4614        /* ch 1 = B */
4615        point = 0;
4616        cam_tonemap_curve_t tonemapCurveBlue;
4617        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
4618            for (int j = 0; j < 2; j++) {
4619               tonemapCurveBlue.tonemap_points[i][j] =
4620                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
4621               point++;
4622            }
4623        }
4624        tonemapCurves.curves[1] = tonemapCurveBlue;
4625
4626        /* ch 2 = R */
4627        point = 0;
4628        cam_tonemap_curve_t tonemapCurveRed;
4629        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
4630            for (int j = 0; j < 2; j++) {
4631               tonemapCurveRed.tonemap_points[i][j] =
4632                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
4633               point++;
4634            }
4635        }
4636        tonemapCurves.curves[2] = tonemapCurveRed;
4637
4638        rc = AddSetMetaEntryToBatch(hal_metadata,
4639                CAM_INTF_META_TONEMAP_CURVES,
4640                sizeof(tonemapCurves), &tonemapCurves);
4641    }
4642
4643    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4644        uint8_t captureIntent =
4645            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4646        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
4647                sizeof(captureIntent), &captureIntent);
4648    }
4649
4650    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
4651        uint8_t blackLevelLock =
4652            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
4653        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
4654                sizeof(blackLevelLock), &blackLevelLock);
4655    }
4656
4657    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
4658        uint8_t lensShadingMapMode =
4659            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
4660        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
4661                sizeof(lensShadingMapMode), &lensShadingMapMode);
4662    }
4663
4664    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
4665        cam_area_t roi;
4666        bool reset = true;
4667        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
4668        if (scalerCropSet) {
4669            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4670        }
4671        if (reset) {
4672            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
4673                    sizeof(roi), &roi);
4674        }
4675    }
4676
4677    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
4678        cam_area_t roi;
4679        bool reset = true;
4680        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
4681        if (scalerCropSet) {
4682            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4683        }
4684        if (reset) {
4685            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
4686                    sizeof(roi), &roi);
4687        }
4688    }
4689
4690    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
4691        cam_area_t roi;
4692        bool reset = true;
4693        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
4694        if (scalerCropSet) {
4695            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4696        }
4697        if (reset) {
4698            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AWB_REGIONS,
4699                    sizeof(roi), &roi);
4700        }
4701    }
4702
4703    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
4704        cam_test_pattern_data_t testPatternData;
4705        uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
4706        uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
4707               sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
4708
4709        memset(&testPatternData, 0, sizeof(testPatternData));
4710        testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
4711        if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
4712                frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
4713            int32_t* fwk_testPatternData = frame_settings.find(
4714                    ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
4715            testPatternData.r = fwk_testPatternData[0];
4716            testPatternData.b = fwk_testPatternData[3];
4717            switch (gCamCapability[mCameraId]->color_arrangement) {
4718            case CAM_FILTER_ARRANGEMENT_RGGB:
4719            case CAM_FILTER_ARRANGEMENT_GRBG:
4720                testPatternData.gr = fwk_testPatternData[1];
4721                testPatternData.gb = fwk_testPatternData[2];
4722                break;
4723            case CAM_FILTER_ARRANGEMENT_GBRG:
4724            case CAM_FILTER_ARRANGEMENT_BGGR:
4725                testPatternData.gr = fwk_testPatternData[2];
4726                testPatternData.gb = fwk_testPatternData[1];
4727                break;
4728            default:
4729                ALOGE("%s: color arrangement %d is not supported", __func__,
4730                    gCamCapability[mCameraId]->color_arrangement);
4731                break;
4732            }
4733        }
4734        rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_TEST_PATTERN_DATA,
4735            sizeof(testPatternData), &testPatternData);
4736    }
4737
4738    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
4739        double *gps_coords =
4740            frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
4741        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
4742    }
4743
4744    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
4745        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
4746        const char *gps_methods_src = (const char *)
4747                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
4748        uint32_t count = frame_settings.find(
4749                ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
4750        memset(gps_methods, 0, sizeof(gps_methods));
4751        strncpy(gps_methods, gps_methods_src, sizeof(gps_methods));
4752        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
4753    }
4754
4755    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
4756        int64_t gps_timestamp =
4757            frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
4758        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
4759    }
4760
4761    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
4762        int32_t orientation =
4763            frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
4764        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
4765    }
4766
4767    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
4768        int8_t quality =
4769            frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
4770        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
4771    }
4772
4773    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
4774        int8_t thumb_quality =
4775            frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
4776        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
4777    }
4778
4779    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
4780        cam_dimension_t dim;
4781        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
4782        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
4783        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
4784    }
4785
4786    // Internal metadata
4787    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
4788        uint8_t* privatedata =
4789            frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.u8;
4790        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
4791            sizeof(uint8_t) * MAX_METADATA_PAYLOAD_SIZE, privatedata);
4792    }
4793
4794    // EV step
4795    rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
4796            sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
4797
4798    return rc;
4799}
4800
4801/*===========================================================================
4802 * FUNCTION   : captureResultCb
4803 *
4804 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
4805 *
4806 * PARAMETERS :
4807 *   @frame  : frame information from mm-camera-interface
4808 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
4809 *   @userdata: userdata
4810 *
4811 * RETURN     : NONE
4812 *==========================================================================*/
4813void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
4814                camera3_stream_buffer_t *buffer,
4815                uint32_t frame_number, void *userdata)
4816{
4817    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
4818    if (hw == NULL) {
4819        ALOGE("%s: Invalid hw %p", __func__, hw);
4820        return;
4821    }
4822
4823    hw->captureResultCb(metadata, buffer, frame_number);
4824    return;
4825}
4826
4827
4828/*===========================================================================
4829 * FUNCTION   : initialize
4830 *
4831 * DESCRIPTION: Pass framework callback pointers to HAL
4832 *
4833 * PARAMETERS :
4834 *
4835 *
4836 * RETURN     : Success : 0
4837 *              Failure: -ENODEV
4838 *==========================================================================*/
4839
4840int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
4841                                  const camera3_callback_ops_t *callback_ops)
4842{
4843    ALOGV("%s: E", __func__);
4844    QCamera3HardwareInterface *hw =
4845        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4846    if (!hw) {
4847        ALOGE("%s: NULL camera device", __func__);
4848        return -ENODEV;
4849    }
4850
4851    int rc = hw->initialize(callback_ops);
4852    ALOGV("%s: X", __func__);
4853    return rc;
4854}
4855
4856/*===========================================================================
4857 * FUNCTION   : configure_streams
4858 *
4859 * DESCRIPTION:
4860 *
4861 * PARAMETERS :
4862 *
4863 *
4864 * RETURN     : Success: 0
4865 *              Failure: -EINVAL (if stream configuration is invalid)
4866 *                       -ENODEV (fatal error)
4867 *==========================================================================*/
4868
4869int QCamera3HardwareInterface::configure_streams(
4870        const struct camera3_device *device,
4871        camera3_stream_configuration_t *stream_list)
4872{
4873    ALOGV("%s: E", __func__);
4874    QCamera3HardwareInterface *hw =
4875        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4876    if (!hw) {
4877        ALOGE("%s: NULL camera device", __func__);
4878        return -ENODEV;
4879    }
4880    int rc = hw->configureStreams(stream_list);
4881    ALOGV("%s: X", __func__);
4882    return rc;
4883}
4884
4885/*===========================================================================
4886 * FUNCTION   : register_stream_buffers
4887 *
4888 * DESCRIPTION: Register stream buffers with the device
4889 *
4890 * PARAMETERS :
4891 *
4892 * RETURN     :
4893 *==========================================================================*/
4894int QCamera3HardwareInterface::register_stream_buffers(
4895        const struct camera3_device *device,
4896        const camera3_stream_buffer_set_t *buffer_set)
4897{
4898    ALOGV("%s: E", __func__);
4899    QCamera3HardwareInterface *hw =
4900        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4901    if (!hw) {
4902        ALOGE("%s: NULL camera device", __func__);
4903        return -ENODEV;
4904    }
4905    int rc = hw->registerStreamBuffers(buffer_set);
4906    ALOGV("%s: X", __func__);
4907    return rc;
4908}
4909
4910/*===========================================================================
4911 * FUNCTION   : construct_default_request_settings
4912 *
4913 * DESCRIPTION: Configure a settings buffer to meet the required use case
4914 *
4915 * PARAMETERS :
4916 *
4917 *
4918 * RETURN     : Success: Return valid metadata
4919 *              Failure: Return NULL
4920 *==========================================================================*/
4921const camera_metadata_t* QCamera3HardwareInterface::
4922    construct_default_request_settings(const struct camera3_device *device,
4923                                        int type)
4924{
4925
4926    ALOGV("%s: E", __func__);
4927    camera_metadata_t* fwk_metadata = NULL;
4928    QCamera3HardwareInterface *hw =
4929        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4930    if (!hw) {
4931        ALOGE("%s: NULL camera device", __func__);
4932        return NULL;
4933    }
4934
4935    fwk_metadata = hw->translateCapabilityToMetadata(type);
4936
4937    ALOGV("%s: X", __func__);
4938    return fwk_metadata;
4939}
4940
4941/*===========================================================================
4942 * FUNCTION   : process_capture_request
4943 *
4944 * DESCRIPTION:
4945 *
4946 * PARAMETERS :
4947 *
4948 *
4949 * RETURN     :
4950 *==========================================================================*/
4951int QCamera3HardwareInterface::process_capture_request(
4952                    const struct camera3_device *device,
4953                    camera3_capture_request_t *request)
4954{
4955    ALOGV("%s: E", __func__);
4956    QCamera3HardwareInterface *hw =
4957        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4958    if (!hw) {
4959        ALOGE("%s: NULL camera device", __func__);
4960        return -EINVAL;
4961    }
4962
4963    int rc = hw->processCaptureRequest(request);
4964    ALOGV("%s: X", __func__);
4965    return rc;
4966}
4967
4968/*===========================================================================
4969 * FUNCTION   : get_metadata_vendor_tag_ops
4970 *
4971 * DESCRIPTION: Get the metadata vendor tag function pointers
4972 *
4973 * PARAMETERS :
4974 *    @ops   : function pointer table to be filled by HAL
4975 *
4976 *
4977 * RETURN     : NONE
4978 *==========================================================================*/
4979void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
4980                const struct camera3_device * /*device*/,
4981                vendor_tag_query_ops_t* ops)
4982{
4983    ALOGV("%s: E", __func__);
4984    ops->get_camera_vendor_section_name = get_camera_vendor_section_name;
4985    ops->get_camera_vendor_tag_name = get_camera_vendor_tag_name;
4986    ops->get_camera_vendor_tag_type = get_camera_vendor_tag_type;
4987    ALOGV("%s: X", __func__);
4988    return;
4989}
4990
4991/*===========================================================================
4992 * FUNCTION   : get_camera_vendor_section_name
4993 *
4994 * DESCRIPTION: Get section name for vendor tag
4995 *
4996 * PARAMETERS :
4997 *    @tag   :  Vendor specific tag
4998 *
4999 *
5000 * RETURN     : Success: the section name of the specific tag
5001 *              Failure: NULL
5002 *==========================================================================*/
5003
5004const char* QCamera3HardwareInterface::get_camera_vendor_section_name(
5005                const vendor_tag_query_ops_t * /*ops*/,
5006                uint32_t tag)
5007{
5008    ALOGV("%s: E", __func__);
5009    const char *ret;
5010    uint32_t section = tag >> 16;
5011
5012    if (section < VENDOR_SECTION || section > QCAMERA3_SECTIONS_END)
5013        ret = NULL;
5014    else
5015        ret = qcamera3_ext_section_names[section - VENDOR_SECTION];
5016
5017    ALOGV("%s: X", __func__);
5018    return ret;
5019}
5020
5021/*===========================================================================
5022 * FUNCTION   : get_camera_vendor_tag_name
5023 *
5024 * DESCRIPTION: Get name of a vendor specific tag
5025 *
5026 * PARAMETERS :
5027 *    @tag   :  Vendor specific tag
5028 *
5029 *
5030 * RETURN     : Success: the name of the specific tag
5031 *              Failure: NULL
5032 *==========================================================================*/
5033const char* QCamera3HardwareInterface::get_camera_vendor_tag_name(
5034                const vendor_tag_query_ops_t * /*ops*/,
5035                uint32_t tag)
5036{
5037    ALOGV("%s: E", __func__);
5038    const char *ret;
5039    uint32_t section = tag >> 16;
5040    uint32_t section_index = section - VENDOR_SECTION;
5041    uint32_t tag_index = tag & 0xFFFF;
5042
5043    if (section < VENDOR_SECTION || section > QCAMERA3_SECTIONS_END)
5044        ret = NULL;
5045    else if (tag >= (uint32_t)qcamera3_ext3_section_bounds[section_index])
5046        ret = NULL;
5047    else
5048        ret = qcamera3_tag_info[section_index][tag_index].tag_name;
5049
5050    ALOGV("%s: X", __func__);
5051    return ret;
5052}
5053
5054/*===========================================================================
5055 * FUNCTION   : get_camera_vendor_tag_type
5056 *
5057 * DESCRIPTION: Get type of a vendor specific tag
5058 *
5059 * PARAMETERS :
5060 *    @tag   :  Vendor specific tag
5061 *
5062 *
5063 * RETURN     : Success: the type of the specific tag
5064 *              Failure: -1
5065 *==========================================================================*/
5066int QCamera3HardwareInterface::get_camera_vendor_tag_type(
5067                const vendor_tag_query_ops_t * /*ops*/,
5068                uint32_t tag)
5069{
5070    ALOGV("%s: E", __func__);
5071    int ret;
5072    uint32_t section = tag >> 16;
5073    uint32_t section_index = section - VENDOR_SECTION;
5074    uint32_t tag_index = tag & 0xFFFF;
5075
5076    if (section < VENDOR_SECTION || section > QCAMERA3_SECTIONS_END)
5077        ret = -1;
5078    else if (tag >= (uint32_t )qcamera3_ext3_section_bounds[section_index])
5079        ret = -1;
5080    else
5081        ret = qcamera3_tag_info[section_index][tag_index].tag_type;
5082
5083    ALOGV("%s: X", __func__);
5084    return ret;
5085}
5086
5087/*===========================================================================
5088 * FUNCTION   : dump
5089 *
5090 * DESCRIPTION:
5091 *
5092 * PARAMETERS :
5093 *
5094 *
5095 * RETURN     :
5096 *==========================================================================*/
5097
5098void QCamera3HardwareInterface::dump(
5099                const struct camera3_device *device, int fd)
5100{
5101    ALOGV("%s: E", __func__);
5102    QCamera3HardwareInterface *hw =
5103        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5104    if (!hw) {
5105        ALOGE("%s: NULL camera device", __func__);
5106        return;
5107    }
5108
5109    hw->dump(fd);
5110    ALOGV("%s: X", __func__);
5111    return;
5112}
5113
5114/*===========================================================================
5115 * FUNCTION   : flush
5116 *
5117 * DESCRIPTION:
5118 *
5119 * PARAMETERS :
5120 *
5121 *
5122 * RETURN     :
5123 *==========================================================================*/
5124
5125int QCamera3HardwareInterface::flush(
5126                const struct camera3_device *device)
5127{
5128    int rc;
5129    ALOGV("%s: E", __func__);
5130    QCamera3HardwareInterface *hw =
5131        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5132    if (!hw) {
5133        ALOGE("%s: NULL camera device", __func__);
5134        return -EINVAL;
5135    }
5136
5137    rc = hw->flush();
5138    ALOGV("%s: X", __func__);
5139    return rc;
5140}
5141
5142/*===========================================================================
5143 * FUNCTION   : close_camera_device
5144 *
5145 * DESCRIPTION:
5146 *
5147 * PARAMETERS :
5148 *
5149 *
5150 * RETURN     :
5151 *==========================================================================*/
5152int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
5153{
5154    ALOGV("%s: E", __func__);
5155    int ret = NO_ERROR;
5156    QCamera3HardwareInterface *hw =
5157        reinterpret_cast<QCamera3HardwareInterface *>(
5158            reinterpret_cast<camera3_device_t *>(device)->priv);
5159    if (!hw) {
5160        ALOGE("NULL camera device");
5161        return BAD_VALUE;
5162    }
5163    delete hw;
5164
5165    pthread_mutex_lock(&mCameraSessionLock);
5166    mCameraSessionActive = 0;
5167    pthread_mutex_unlock(&mCameraSessionLock);
5168    ALOGV("%s: X", __func__);
5169    return ret;
5170}
5171
5172/*===========================================================================
5173 * FUNCTION   : getWaveletDenoiseProcessPlate
5174 *
5175 * DESCRIPTION: query wavelet denoise process plate
5176 *
5177 * PARAMETERS : None
5178 *
 * RETURN     : WNR process plate value
5180 *==========================================================================*/
5181cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
5182{
5183    char prop[PROPERTY_VALUE_MAX];
5184    memset(prop, 0, sizeof(prop));
5185    property_get("persist.denoise.process.plates", prop, "0");
5186    int processPlate = atoi(prop);
5187    switch(processPlate) {
5188    case 0:
5189        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
5190    case 1:
5191        return CAM_WAVELET_DENOISE_CBCR_ONLY;
5192    case 2:
5193        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5194    case 3:
5195        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
5196    default:
5197        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5198    }
5199}
5200
5201/*===========================================================================
5202 * FUNCTION   : needRotationReprocess
5203 *
5204 * DESCRIPTION: if rotation needs to be done by reprocess in pp
5205 *
5206 * PARAMETERS : none
5207 *
5208 * RETURN     : true: needed
5209 *              false: no need
5210 *==========================================================================*/
5211bool QCamera3HardwareInterface::needRotationReprocess()
5212{
5213    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
5214        // current rotation is not zero, and pp has the capability to process rotation
5215        ALOGD("%s: need do reprocess for rotation", __func__);
5216        return true;
5217    }
5218
5219    return false;
5220}
5221
5222/*===========================================================================
5223 * FUNCTION   : needReprocess
5224 *
5225 * DESCRIPTION: if reprocess in needed
5226 *
5227 * PARAMETERS : none
5228 *
5229 * RETURN     : true: needed
5230 *              false: no need
5231 *==========================================================================*/
5232bool QCamera3HardwareInterface::needReprocess()
5233{
5234    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
5235        // TODO: add for ZSL HDR later
5236        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
5237        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
5238        return true;
5239    }
5240    return needRotationReprocess();
5241}
5242
5243/*===========================================================================
5244 * FUNCTION   : addOfflineReprocChannel
5245 *
5246 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
5247 *              coming from input channel
5248 *
5249 * PARAMETERS :
5250 *   @pInputChannel : ptr to input channel whose frames will be post-processed
5251 *
5252 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
5253 *==========================================================================*/
5254QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
5255              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle, metadata_buffer_t *metadata)
5256{
5257    int32_t rc = NO_ERROR;
5258    QCamera3ReprocessChannel *pChannel = NULL;
5259    if (pInputChannel == NULL) {
5260        ALOGE("%s: input channel obj is NULL", __func__);
5261        return NULL;
5262    }
5263
5264    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
5265            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
5266    if (NULL == pChannel) {
5267        ALOGE("%s: no mem for reprocess channel", __func__);
5268        return NULL;
5269    }
5270
5271    rc = pChannel->initialize();
5272    if (rc != NO_ERROR) {
5273        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
5274        delete pChannel;
5275        return NULL;
5276    }
5277
5278    // pp feature config
5279    cam_pp_feature_config_t pp_config;
5280    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
5281
5282    if (IS_PARM_VALID(CAM_INTF_META_EDGE_MODE, metadata)) {
5283        cam_edge_application_t *edge = (cam_edge_application_t *)
5284                POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
5285        if (edge->edge_mode != CAM_EDGE_MODE_OFF) {
5286            pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
5287            pp_config.sharpness = edge->sharpness;
5288        }
5289    }
5290
5291    if (IS_PARM_VALID(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
5292        uint8_t *noise_mode = (uint8_t *)POINTER_OF(
5293                CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
5294        if (*noise_mode != CAM_NOISE_REDUCTION_MODE_OFF) {
5295            pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
5296            pp_config.denoise2d.denoise_enable = 1;
5297            pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
5298        }
5299    }
5300
5301    if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
5302        int32_t *rotation = (int32_t *)POINTER_OF(
5303                CAM_INTF_META_JPEG_ORIENTATION, metadata);
5304
5305        if (needRotationReprocess()) {
5306            pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
5307            if (*rotation == 0) {
5308                pp_config.rotation = ROTATE_0;
5309            } else if (*rotation == 90) {
5310                pp_config.rotation = ROTATE_90;
5311            } else if (*rotation == 180) {
5312                pp_config.rotation = ROTATE_180;
5313            } else if (*rotation == 270) {
5314                pp_config.rotation = ROTATE_270;
5315            }
5316        }
5317    }
5318
5319    rc = pChannel->addReprocStreamsFromSource(pp_config,
5320                                             pInputChannel,
5321                                             mMetadataChannel);
5322
5323    if (rc != NO_ERROR) {
5324        delete pChannel;
5325        return NULL;
5326    }
5327    return pChannel;
5328}
5329
5330}; //end namespace qcamera
5331