QCamera3HWI.cpp revision 9770445fab16c26a86acb1dc261f2e41a687ddf8
1/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
50#define MAX(a, b) ((a) > (b) ? (a) : (b))
51
52#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
53cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
54parm_buffer_t *prevSettings;
55const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
56
57pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
58    PTHREAD_MUTEX_INITIALIZER;
59unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
61const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
62    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
63    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
64    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
65    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
66    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
67    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
68    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
69    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
70    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
71};
72
73const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
74    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
75    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
76    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
77    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
78    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
79    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
80    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
81    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
82    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
83};
84
85const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
86    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
87    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
88    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
89    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
90    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
91    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
92    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
93    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
94    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
95    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
96    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
97    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
98    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
99    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
100    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
101};
102
103const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
104    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
105    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
106    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
107    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
108    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
109    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
110};
111
112const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
113    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
114    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
115    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
116    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
117};
118
119const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
120    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
121    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
122    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
123    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
124    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
125};
126
127const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
128    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
129    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
130    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
131};
132
133const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
134    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
135    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
136};
137
138const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
139                                             320, 240, 176, 144, 0, 0};
140
141camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
142    initialize:                         QCamera3HardwareInterface::initialize,
143    configure_streams:                  QCamera3HardwareInterface::configure_streams,
144    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
145    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
146    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
147    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
148    dump:                               QCamera3HardwareInterface::dump,
149    flush:                              QCamera3HardwareInterface::flush,
150    reserved:                           {0},
151};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
164QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
165    : mCameraId(cameraId),
166      mCameraHandle(NULL),
167      mCameraOpened(false),
168      mCameraInitialized(false),
169      mCallbackOps(NULL),
170      mInputStream(NULL),
171      mMetadataChannel(NULL),
172      mPictureChannel(NULL),
173      mFirstRequest(false),
174      mParamHeap(NULL),
175      mParameters(NULL),
176      mJpegSettings(NULL),
177      mIsZslMode(false),
178      mMinProcessedFrameDuration(0),
179      mMinJpegFrameDuration(0),
180      mMinRawFrameDuration(0),
181      m_pPowerModule(NULL)
182{
183    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
184    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
185    mCameraDevice.common.close = close_camera_device;
186    mCameraDevice.ops = &mCameraOps;
187    mCameraDevice.priv = this;
188    gCamCapability[cameraId]->version = CAM_HAL_V3;
189    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
190    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
191    gCamCapability[cameraId]->min_num_pp_bufs = 3;
192
193    pthread_cond_init(&mRequestCond, NULL);
194    mPendingRequest = 0;
195    mCurrentRequestId = -1;
196    pthread_mutex_init(&mMutex, NULL);
197
198    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
199        mDefaultMetadata[i] = NULL;
200
201#ifdef HAS_MULTIMEDIA_HINTS
202    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
203        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
204    }
205#endif
206}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if (channel)
240            delete channel;
241        free (*it);
242    }
243
244    mPictureChannel = NULL;
245
246    if (mJpegSettings != NULL) {
247        free(mJpegSettings);
248        mJpegSettings = NULL;
249    }
250
251    /* Clean up all channels */
252    if (mCameraInitialized) {
253        mMetadataChannel->stop();
254        delete mMetadataChannel;
255        mMetadataChannel = NULL;
256        deinitParameters();
257    }
258
259    if (mCameraOpened)
260        closeCamera();
261
262    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
263        if (mDefaultMetadata[i])
264            free_camera_metadata(mDefaultMetadata[i]);
265
266    pthread_cond_destroy(&mRequestCond);
267
268    pthread_mutex_destroy(&mMutex);
269    ALOGV("%s: X", __func__);
270}
271
272/*===========================================================================
273 * FUNCTION   : openCamera
274 *
275 * DESCRIPTION: open camera
276 *
277 * PARAMETERS :
278 *   @hw_device  : double ptr for camera device struct
279 *
280 * RETURN     : int32_t type of status
281 *              NO_ERROR  -- success
282 *              none-zero failure code
283 *==========================================================================*/
284int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
285{
286    int rc = 0;
287    pthread_mutex_lock(&mCameraSessionLock);
288    if (mCameraSessionActive) {
289        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
290        pthread_mutex_unlock(&mCameraSessionLock);
291        return INVALID_OPERATION;
292    }
293
294    if (mCameraOpened) {
295        *hw_device = NULL;
296        return PERMISSION_DENIED;
297    }
298
299    rc = openCamera();
300    if (rc == 0) {
301        *hw_device = &mCameraDevice.common;
302        mCameraSessionActive = 1;
303    } else
304        *hw_device = NULL;
305
306#ifdef HAS_MULTIMEDIA_HINTS
307    if (rc == 0) {
308        if (m_pPowerModule) {
309            if (m_pPowerModule->powerHint) {
310                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
311                        (void *)"state=1");
312            }
313        }
314    }
315#endif
316    pthread_mutex_unlock(&mCameraSessionLock);
317    return rc;
318}
319
320/*===========================================================================
321 * FUNCTION   : openCamera
322 *
323 * DESCRIPTION: open camera
324 *
325 * PARAMETERS : none
326 *
327 * RETURN     : int32_t type of status
328 *              NO_ERROR  -- success
329 *              none-zero failure code
330 *==========================================================================*/
331int QCamera3HardwareInterface::openCamera()
332{
333    if (mCameraHandle) {
334        ALOGE("Failure: Camera already opened");
335        return ALREADY_EXISTS;
336    }
337    mCameraHandle = camera_open(mCameraId);
338    if (!mCameraHandle) {
339        ALOGE("camera_open failed.");
340        return UNKNOWN_ERROR;
341    }
342
343    mCameraOpened = true;
344
345    return NO_ERROR;
346}
347
348/*===========================================================================
349 * FUNCTION   : closeCamera
350 *
351 * DESCRIPTION: close camera
352 *
353 * PARAMETERS : none
354 *
355 * RETURN     : int32_t type of status
356 *              NO_ERROR  -- success
357 *              none-zero failure code
358 *==========================================================================*/
359int QCamera3HardwareInterface::closeCamera()
360{
361    int rc = NO_ERROR;
362
363    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
364    mCameraHandle = NULL;
365    mCameraOpened = false;
366
367#ifdef HAS_MULTIMEDIA_HINTS
368    if (rc == NO_ERROR) {
369        if (m_pPowerModule) {
370            if (m_pPowerModule->powerHint) {
371                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
372                        (void *)"state=0");
373            }
374        }
375    }
376#endif
377
378    return rc;
379}
380
381/*===========================================================================
382 * FUNCTION   : initialize
383 *
384 * DESCRIPTION: Initialize frameworks callback functions
385 *
386 * PARAMETERS :
387 *   @callback_ops : callback function to frameworks
388 *
389 * RETURN     :
390 *
391 *==========================================================================*/
392int QCamera3HardwareInterface::initialize(
393        const struct camera3_callback_ops *callback_ops)
394{
395    int rc;
396
397    pthread_mutex_lock(&mMutex);
398
399    rc = initParameters();
400    if (rc < 0) {
401        ALOGE("%s: initParamters failed %d", __func__, rc);
402       goto err1;
403    }
404    mCallbackOps = callback_ops;
405
406    pthread_mutex_unlock(&mMutex);
407    mCameraInitialized = true;
408    return 0;
409
410err1:
411    pthread_mutex_unlock(&mMutex);
412    return rc;
413}
414
415/*===========================================================================
416 * FUNCTION   : configureStreams
417 *
418 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
419 *              and output streams.
420 *
421 * PARAMETERS :
422 *   @stream_list : streams to be configured
423 *
424 * RETURN     :
425 *
426 *==========================================================================*/
427int QCamera3HardwareInterface::configureStreams(
428        camera3_stream_configuration_t *streamList)
429{
430    int rc = 0;
431    mIsZslMode = false;
432
433    // Sanity check stream_list
434    if (streamList == NULL) {
435        ALOGE("%s: NULL stream configuration", __func__);
436        return BAD_VALUE;
437    }
438    if (streamList->streams == NULL) {
439        ALOGE("%s: NULL stream list", __func__);
440        return BAD_VALUE;
441    }
442
443    if (streamList->num_streams < 1) {
444        ALOGE("%s: Bad number of streams requested: %d", __func__,
445                streamList->num_streams);
446        return BAD_VALUE;
447    }
448
449    /* first invalidate all the steams in the mStreamList
450     * if they appear again, they will be validated */
451    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
452            it != mStreamInfo.end(); it++) {
453        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
454        channel->stop();
455        (*it)->status = INVALID;
456    }
457    if (mMetadataChannel) {
458        /* If content of mStreamInfo is not 0, there is metadata stream */
459        mMetadataChannel->stop();
460    }
461
462    pthread_mutex_lock(&mMutex);
463
464    camera3_stream_t *inputStream = NULL;
465    camera3_stream_t *jpegStream = NULL;
466    cam_stream_size_info_t stream_config_info;
467
468    for (size_t i = 0; i < streamList->num_streams; i++) {
469        camera3_stream_t *newStream = streamList->streams[i];
470        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
471                __func__, newStream->stream_type, newStream->format,
472                 newStream->width, newStream->height);
473        //if the stream is in the mStreamList validate it
474        bool stream_exists = false;
475        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
476                it != mStreamInfo.end(); it++) {
477            if ((*it)->stream == newStream) {
478                QCamera3Channel *channel =
479                    (QCamera3Channel*)(*it)->stream->priv;
480                stream_exists = true;
481                (*it)->status = RECONFIGURE;
482                /*delete the channel object associated with the stream because
483                  we need to reconfigure*/
484                delete channel;
485                (*it)->stream->priv = NULL;
486            }
487        }
488        if (!stream_exists) {
489            //new stream
490            stream_info_t* stream_info;
491            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
492            stream_info->stream = newStream;
493            stream_info->status = VALID;
494            stream_info->registered = 0;
495            mStreamInfo.push_back(stream_info);
496        }
497        if (newStream->stream_type == CAMERA3_STREAM_INPUT
498                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
499            if (inputStream != NULL) {
500                ALOGE("%s: Multiple input streams requested!", __func__);
501                pthread_mutex_unlock(&mMutex);
502                return BAD_VALUE;
503            }
504            inputStream = newStream;
505        }
506        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
507            jpegStream = newStream;
508        }
509    }
510    mInputStream = inputStream;
511
512    /*clean up invalid streams*/
513    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
514            it != mStreamInfo.end();) {
515        if(((*it)->status) == INVALID){
516            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
517            delete channel;
518            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
519            free(*it);
520            it = mStreamInfo.erase(it);
521        } else {
522            it++;
523        }
524    }
525    if (mMetadataChannel) {
526        delete mMetadataChannel;
527        mMetadataChannel = NULL;
528    }
529
530    //Create metadata channel and initialize it
531    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
532                    mCameraHandle->ops, captureResultCb,
533                    &gCamCapability[mCameraId]->padding_info, this);
534    if (mMetadataChannel == NULL) {
535        ALOGE("%s: failed to allocate metadata channel", __func__);
536        rc = -ENOMEM;
537        pthread_mutex_unlock(&mMutex);
538        return rc;
539    }
540    rc = mMetadataChannel->initialize();
541    if (rc < 0) {
542        ALOGE("%s: metadata channel initialization failed", __func__);
543        delete mMetadataChannel;
544        pthread_mutex_unlock(&mMutex);
545        return rc;
546    }
547
548    /* Allocate channel objects for the requested streams */
549    for (size_t i = 0; i < streamList->num_streams; i++) {
550        camera3_stream_t *newStream = streamList->streams[i];
551        uint32_t stream_usage = newStream->usage;
552        stream_config_info.stream_sizes[i].width = newStream->width;
553        stream_config_info.stream_sizes[i].height = newStream->height;
554        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
555            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
556            //for zsl stream the size is jpeg size
557            stream_config_info.stream_sizes[i].width = jpegStream->width;
558            stream_config_info.stream_sizes[i].height = jpegStream->height;
559            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
560        } else {
561           //for non zsl streams find out the format
562           switch (newStream->format) {
563           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
564              {
565                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
566                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
567                 } else {
568                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
569                 }
570              }
571              break;
572           case HAL_PIXEL_FORMAT_YCbCr_420_888:
573              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
574              break;
575           case HAL_PIXEL_FORMAT_BLOB:
576              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
577              break;
578           default:
579              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
580              break;
581           }
582        }
583        if (newStream->priv == NULL) {
584            //New stream, construct channel
585            switch (newStream->stream_type) {
586            case CAMERA3_STREAM_INPUT:
587                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
588                break;
589            case CAMERA3_STREAM_BIDIRECTIONAL:
590                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
591                    GRALLOC_USAGE_HW_CAMERA_WRITE;
592                break;
593            case CAMERA3_STREAM_OUTPUT:
594                /* For video encoding stream, set read/write rarely
595                 * flag so that they may be set to un-cached */
596                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
597                    newStream->usage =
598                         (GRALLOC_USAGE_SW_READ_RARELY |
599                         GRALLOC_USAGE_SW_WRITE_RARELY |
600                         GRALLOC_USAGE_HW_CAMERA_WRITE);
601                else
602                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
603                break;
604            default:
605                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
606                break;
607            }
608
609            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
610                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
611                QCamera3Channel *channel;
612                switch (newStream->format) {
613                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
614                case HAL_PIXEL_FORMAT_YCbCr_420_888:
615                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
616                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
617                        jpegStream) {
618                        uint32_t width = jpegStream->width;
619                        uint32_t height = jpegStream->height;
620                        mIsZslMode = true;
621                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
622                            mCameraHandle->ops, captureResultCb,
623                            &gCamCapability[mCameraId]->padding_info, this, newStream,
624                            width, height);
625                    } else
626                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
627                            mCameraHandle->ops, captureResultCb,
628                            &gCamCapability[mCameraId]->padding_info, this, newStream);
629                    if (channel == NULL) {
630                        ALOGE("%s: allocation of channel failed", __func__);
631                        pthread_mutex_unlock(&mMutex);
632                        return -ENOMEM;
633                    }
634
635                    newStream->priv = channel;
636                    break;
637                case HAL_PIXEL_FORMAT_BLOB:
638                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
639                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
640                            mCameraHandle->ops, captureResultCb,
641                            &gCamCapability[mCameraId]->padding_info, this, newStream);
642                    if (mPictureChannel == NULL) {
643                        ALOGE("%s: allocation of channel failed", __func__);
644                        pthread_mutex_unlock(&mMutex);
645                        return -ENOMEM;
646                    }
647                    newStream->priv = (QCamera3Channel*)mPictureChannel;
648                    break;
649
650                //TODO: Add support for app consumed format?
651                default:
652                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
653                    break;
654                }
655            }
656        } else {
657            // Channel already exists for this stream
658            // Do nothing for now
659        }
660    }
661    /*For the streams to be reconfigured we need to register the buffers
662      since the framework wont*/
663    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
664            it != mStreamInfo.end(); it++) {
665        if ((*it)->status == RECONFIGURE) {
666            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
667            /*only register buffers for streams that have already been
668              registered*/
669            if ((*it)->registered) {
670                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
671                        (*it)->buffer_set.buffers);
672                if (rc != NO_ERROR) {
673                    ALOGE("%s: Failed to register the buffers of old stream,\
674                            rc = %d", __func__, rc);
675                }
676                ALOGV("%s: channel %p has %d buffers",
677                        __func__, channel, (*it)->buffer_set.num_buffers);
678            }
679        }
680
681        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
682        if (index == NAME_NOT_FOUND) {
683            mPendingBuffersMap.add((*it)->stream, 0);
684        } else {
685            mPendingBuffersMap.editValueAt(index) = 0;
686        }
687    }
688
689    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
690    mPendingRequestsList.clear();
691
692    /*flush the metadata list*/
693    if (!mStoredMetadataList.empty()) {
694        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
695              m != mStoredMetadataList.end(); m++) {
696            mMetadataChannel->bufDone(m->meta_buf);
697            free(m->meta_buf);
698            m = mStoredMetadataList.erase(m);
699        }
700    }
701    int32_t hal_version = CAM_HAL_V3;
702    stream_config_info.num_streams = streamList->num_streams;
703
704    //settings/parameters don't carry over for new configureStreams
705    memset(mParameters, 0, sizeof(parm_buffer_t));
706
707    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
708    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
709                sizeof(hal_version), &hal_version);
710
711    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
712                sizeof(stream_config_info), &stream_config_info);
713
714    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
715
716    mFirstRequest = true;
717
718    //Get min frame duration for this streams configuration
719    deriveMinFrameDuration();
720
721    pthread_mutex_unlock(&mMutex);
722    return rc;
723}
724
725/*===========================================================================
726 * FUNCTION   : validateCaptureRequest
727 *
728 * DESCRIPTION: validate a capture request from camera service
729 *
730 * PARAMETERS :
731 *   @request : request from framework to process
732 *
733 * RETURN     :
734 *
735 *==========================================================================*/
736int QCamera3HardwareInterface::validateCaptureRequest(
737                    camera3_capture_request_t *request)
738{
739    ssize_t idx = 0;
740    const camera3_stream_buffer_t *b;
741    CameraMetadata meta;
742
743    /* Sanity check the request */
744    if (request == NULL) {
745        ALOGE("%s: NULL capture request", __func__);
746        return BAD_VALUE;
747    }
748
749    uint32_t frameNumber = request->frame_number;
750    if (request->input_buffer != NULL &&
751            request->input_buffer->stream != mInputStream) {
752        ALOGE("%s: Request %d: Input buffer not from input stream!",
753                __FUNCTION__, frameNumber);
754        return BAD_VALUE;
755    }
756    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
757        ALOGE("%s: Request %d: No output buffers provided!",
758                __FUNCTION__, frameNumber);
759        return BAD_VALUE;
760    }
761    if (request->input_buffer != NULL) {
762        b = request->input_buffer;
763        QCamera3Channel *channel =
764            static_cast<QCamera3Channel*>(b->stream->priv);
765        if (channel == NULL) {
766            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
767                    __func__, frameNumber, idx);
768            return BAD_VALUE;
769        }
770        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
771            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
772                    __func__, frameNumber, idx);
773            return BAD_VALUE;
774        }
775        if (b->release_fence != -1) {
776            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
777                    __func__, frameNumber, idx);
778            return BAD_VALUE;
779        }
780        if (b->buffer == NULL) {
781            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
782                    __func__, frameNumber, idx);
783            return BAD_VALUE;
784        }
785    }
786
787    // Validate all buffers
788    b = request->output_buffers;
789    do {
790        QCamera3Channel *channel =
791                static_cast<QCamera3Channel*>(b->stream->priv);
792        if (channel == NULL) {
793            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
794                    __func__, frameNumber, idx);
795            return BAD_VALUE;
796        }
797        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
798            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
799                    __func__, frameNumber, idx);
800            return BAD_VALUE;
801        }
802        if (b->release_fence != -1) {
803            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
804                    __func__, frameNumber, idx);
805            return BAD_VALUE;
806        }
807        if (b->buffer == NULL) {
808            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
809                    __func__, frameNumber, idx);
810            return BAD_VALUE;
811        }
812        idx++;
813        b = request->output_buffers + idx;
814    } while (idx < (ssize_t)request->num_output_buffers);
815
816    return NO_ERROR;
817}
818
819/*===========================================================================
820 * FUNCTION   : deriveMinFrameDuration
821 *
822 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
823 *              on currently configured streams.
824 *
825 * PARAMETERS : NONE
826 *
827 * RETURN     : NONE
828 *
829 *==========================================================================*/
830void QCamera3HardwareInterface::deriveMinFrameDuration()
831{
832    int32_t maxJpegDimension, maxProcessedDimension;
833
834    maxJpegDimension = 0;
835    maxProcessedDimension = 0;
836
837    // Figure out maximum jpeg, processed, and raw dimensions
838    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
839        it != mStreamInfo.end(); it++) {
840
841        // Input stream doesn't have valid stream_type
842        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
843            continue;
844
845        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
846        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
847            if (dimension > maxJpegDimension)
848                maxJpegDimension = dimension;
849        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
850            if (dimension > maxProcessedDimension)
851                maxProcessedDimension = dimension;
852        }
853    }
854
855    //Assume all jpeg dimensions are in processed dimensions.
856    if (maxJpegDimension > maxProcessedDimension)
857        maxProcessedDimension = maxJpegDimension;
858
859    //Find minimum durations for processed, jpeg, and raw
860    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
861    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
862        if (maxProcessedDimension ==
863            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
864            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
865            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
866            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
867            break;
868        }
869    }
870}
871
872/*===========================================================================
873 * FUNCTION   : getMinFrameDuration
874 *
875 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
876 *              and current request configuration.
877 *
878 * PARAMETERS : @request: requset sent by the frameworks
879 *
880 * RETURN     : min farme duration for a particular request
881 *
882 *==========================================================================*/
883int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
884{
885    bool hasJpegStream = false;
886    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
887        const camera3_stream_t *stream = request->output_buffers[i].stream;
888        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
889            hasJpegStream = true;
890    }
891
892    if (!hasJpegStream)
893        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
894    else
895        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
896}
897
898/*===========================================================================
899 * FUNCTION   : registerStreamBuffers
900 *
901 * DESCRIPTION: Register buffers for a given stream with the HAL device.
902 *
903 * PARAMETERS :
904 *   @stream_list : streams to be configured
905 *
906 * RETURN     :
907 *
908 *==========================================================================*/
909int QCamera3HardwareInterface::registerStreamBuffers(
910        const camera3_stream_buffer_set_t *buffer_set)
911{
912    int rc = 0;
913
914    pthread_mutex_lock(&mMutex);
915
916    if (buffer_set == NULL) {
917        ALOGE("%s: Invalid buffer_set parameter.", __func__);
918        pthread_mutex_unlock(&mMutex);
919        return -EINVAL;
920    }
921    if (buffer_set->stream == NULL) {
922        ALOGE("%s: Invalid stream parameter.", __func__);
923        pthread_mutex_unlock(&mMutex);
924        return -EINVAL;
925    }
926    if (buffer_set->num_buffers < 1) {
927        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
928        pthread_mutex_unlock(&mMutex);
929        return -EINVAL;
930    }
931    if (buffer_set->buffers == NULL) {
932        ALOGE("%s: Invalid buffers parameter.", __func__);
933        pthread_mutex_unlock(&mMutex);
934        return -EINVAL;
935    }
936
937    camera3_stream_t *stream = buffer_set->stream;
938    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
939
940    //set the buffer_set in the mStreamInfo array
941    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
942            it != mStreamInfo.end(); it++) {
943        if ((*it)->stream == stream) {
944            uint32_t numBuffers = buffer_set->num_buffers;
945            (*it)->buffer_set.stream = buffer_set->stream;
946            (*it)->buffer_set.num_buffers = numBuffers;
947            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
948            if ((*it)->buffer_set.buffers == NULL) {
949                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
950                pthread_mutex_unlock(&mMutex);
951                return -ENOMEM;
952            }
953            for (size_t j = 0; j < numBuffers; j++){
954                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
955            }
956            (*it)->registered = 1;
957        }
958    }
959    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
960    if (rc < 0) {
961        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
962        pthread_mutex_unlock(&mMutex);
963        return -ENODEV;
964    }
965
966    pthread_mutex_unlock(&mMutex);
967    return NO_ERROR;
968}
969
970/*===========================================================================
971 * FUNCTION   : processCaptureRequest
972 *
973 * DESCRIPTION: process a capture request from camera service
974 *
975 * PARAMETERS :
976 *   @request : request from framework to process
977 *
978 * RETURN     :
979 *
980 *==========================================================================*/
981int QCamera3HardwareInterface::processCaptureRequest(
982                    camera3_capture_request_t *request)
983{
984    int rc = NO_ERROR;
985    int32_t request_id;
986    CameraMetadata meta;
987    MetadataBufferInfo reproc_meta;
988    int queueMetadata = 0;
989
990    pthread_mutex_lock(&mMutex);
991
992    rc = validateCaptureRequest(request);
993    if (rc != NO_ERROR) {
994        ALOGE("%s: incoming request is not valid", __func__);
995        pthread_mutex_unlock(&mMutex);
996        return rc;
997    }
998
999    meta = request->settings;
1000
1001    // For first capture request, send capture intent, and
1002    // stream on all streams
1003    if (mFirstRequest) {
1004
1005        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1006            int32_t hal_version = CAM_HAL_V3;
1007            uint8_t captureIntent =
1008                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1009
1010            memset(mParameters, 0, sizeof(parm_buffer_t));
1011            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1012            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1013                sizeof(hal_version), &hal_version);
1014            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1015                sizeof(captureIntent), &captureIntent);
1016            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1017                mParameters);
1018        }
1019
1020        mMetadataChannel->start();
1021        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1022            it != mStreamInfo.end(); it++) {
1023            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1024            channel->start();
1025        }
1026    }
1027
1028    uint32_t frameNumber = request->frame_number;
1029    uint32_t streamTypeMask = 0;
1030
1031    if (meta.exists(ANDROID_REQUEST_ID)) {
1032        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1033        mCurrentRequestId = request_id;
1034        ALOGV("%s: Received request with id: %d",__func__, request_id);
1035    } else if (mFirstRequest || mCurrentRequestId == -1){
1036        ALOGE("%s: Unable to find request id field, \
1037                & no previous id available", __func__);
1038        return NAME_NOT_FOUND;
1039    } else {
1040        ALOGV("%s: Re-using old request id", __func__);
1041        request_id = mCurrentRequestId;
1042    }
1043
1044    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1045                                    __func__, __LINE__,
1046                                    request->num_output_buffers,
1047                                    request->input_buffer,
1048                                    frameNumber);
1049    // Acquire all request buffers first
1050    int blob_request = 0;
1051    for (size_t i = 0; i < request->num_output_buffers; i++) {
1052        const camera3_stream_buffer_t& output = request->output_buffers[i];
1053        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1054        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1055
1056        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1057        //Call function to store local copy of jpeg data for encode params.
1058            blob_request = 1;
1059            rc = getJpegSettings(request->settings);
1060            if (rc < 0) {
1061                ALOGE("%s: failed to get jpeg parameters", __func__);
1062                pthread_mutex_unlock(&mMutex);
1063                return rc;
1064            }
1065        }
1066
1067        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1068        if (rc != OK) {
1069            ALOGE("%s: fence wait failed %d", __func__, rc);
1070            pthread_mutex_unlock(&mMutex);
1071            return rc;
1072        }
1073        streamTypeMask |= channel->getStreamTypeMask();
1074    }
1075
1076    rc = setFrameParameters(request, streamTypeMask);
1077    if (rc < 0) {
1078        ALOGE("%s: fail to set frame parameters", __func__);
1079        pthread_mutex_unlock(&mMutex);
1080        return rc;
1081    }
1082
1083    /* Update pending request list and pending buffers map */
1084    PendingRequestInfo pendingRequest;
1085    pendingRequest.frame_number = frameNumber;
1086    pendingRequest.num_buffers = request->num_output_buffers;
1087    pendingRequest.request_id = request_id;
1088    pendingRequest.blob_request = blob_request;
1089    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1090
1091    for (size_t i = 0; i < request->num_output_buffers; i++) {
1092        RequestedBufferInfo requestedBuf;
1093        requestedBuf.stream = request->output_buffers[i].stream;
1094        requestedBuf.buffer = NULL;
1095        pendingRequest.buffers.push_back(requestedBuf);
1096
1097        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1098    }
1099    mPendingRequestsList.push_back(pendingRequest);
1100
1101    // Notify metadata channel we receive a request
1102    mMetadataChannel->request(NULL, frameNumber);
1103
1104    // Call request on other streams
1105    for (size_t i = 0; i < request->num_output_buffers; i++) {
1106        const camera3_stream_buffer_t& output = request->output_buffers[i];
1107        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1108        mm_camera_buf_def_t *pInputBuffer = NULL;
1109
1110        if (channel == NULL) {
1111            ALOGE("%s: invalid channel pointer for stream", __func__);
1112            continue;
1113        }
1114
1115        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1116            QCamera3RegularChannel* inputChannel = NULL;
1117            if(request->input_buffer != NULL){
1118                //Try to get the internal format
1119                inputChannel = (QCamera3RegularChannel*)
1120                    request->input_buffer->stream->priv;
1121                if(inputChannel == NULL ){
1122                    ALOGE("%s: failed to get input channel handle", __func__);
1123                } else {
1124                    pInputBuffer =
1125                        inputChannel->getInternalFormatBuffer(
1126                                request->input_buffer->buffer);
1127                    ALOGD("%s: Input buffer dump",__func__);
1128                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1129                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1130                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1131                    ALOGD("Handle:%p", request->input_buffer->buffer);
1132                    //TODO: need to get corresponding metadata and send it to pproc
1133                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1134                         m != mStoredMetadataList.end(); m++) {
1135                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1136                            reproc_meta.meta_buf = m->meta_buf;
1137                            queueMetadata = 1;
1138                            break;
1139                        }
1140                    }
1141                }
1142            }
1143            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1144                            pInputBuffer,(QCamera3Channel*)inputChannel);
1145            if (queueMetadata) {
1146                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1147            }
1148        } else {
1149            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1150                __LINE__, output.buffer, frameNumber);
1151            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1152                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1153                     m != mStoredMetadataList.end(); m++) {
1154                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1155                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1156                            mMetadataChannel->bufDone(m->meta_buf);
1157                            free(m->meta_buf);
1158                            m = mStoredMetadataList.erase(m);
1159                            break;
1160                        }
1161                   }
1162                }
1163            }
1164            rc = channel->request(output.buffer, frameNumber);
1165        }
1166        if (rc < 0)
1167            ALOGE("%s: request failed", __func__);
1168    }
1169
1170    mFirstRequest = false;
1171    // Added a timed condition wait
1172    struct timespec ts;
1173    uint8_t isValidTimeout = 1;
1174    rc = clock_gettime(CLOCK_REALTIME, &ts);
1175    if (rc < 0) {
1176        isValidTimeout = 0;
1177        ALOGE("%s: Error reading the real time clock!!", __func__);
1178    }
1179    else {
1180        // Make timeout as 5 sec for request to be honored
1181        ts.tv_sec += 5;
1182    }
1183    //Block on conditional variable
1184    mPendingRequest = 1;
1185    while (mPendingRequest == 1) {
1186        if (!isValidTimeout) {
1187            ALOGV("%s: Blocking on conditional wait", __func__);
1188            pthread_cond_wait(&mRequestCond, &mMutex);
1189        }
1190        else {
1191            ALOGV("%s: Blocking on timed conditional wait", __func__);
1192            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1193            if (rc == ETIMEDOUT) {
1194                rc = -ENODEV;
1195                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1196                break;
1197            }
1198        }
1199        ALOGV("%s: Unblocked", __func__);
1200    }
1201
1202    pthread_mutex_unlock(&mMutex);
1203    return rc;
1204}
1205
1206/*===========================================================================
1207 * FUNCTION   : getMetadataVendorTagOps
1208 *
1209 * DESCRIPTION:
1210 *
1211 * PARAMETERS :
1212 *
1213 *
1214 * RETURN     :
1215 *==========================================================================*/
1216void QCamera3HardwareInterface::getMetadataVendorTagOps(
1217                    vendor_tag_query_ops_t* /*ops*/)
1218{
1219    /* Enable locks when we eventually add Vendor Tags */
1220    /*
1221    pthread_mutex_lock(&mMutex);
1222
1223    pthread_mutex_unlock(&mMutex);
1224    */
1225    return;
1226}
1227
1228/*===========================================================================
1229 * FUNCTION   : dump
1230 *
1231 * DESCRIPTION:
1232 *
1233 * PARAMETERS :
1234 *
1235 *
1236 * RETURN     :
1237 *==========================================================================*/
1238void QCamera3HardwareInterface::dump(int /*fd*/)
1239{
1240    /*Enable lock when we implement this function*/
1241    /*
1242    pthread_mutex_lock(&mMutex);
1243
1244    pthread_mutex_unlock(&mMutex);
1245    */
1246    return;
1247}
1248
1249/*===========================================================================
1250 * FUNCTION   : flush
1251 *
1252 * DESCRIPTION:
1253 *
1254 * PARAMETERS :
1255 *
1256 *
1257 * RETURN     :
1258 *==========================================================================*/
1259int QCamera3HardwareInterface::flush()
1260{
1261    /*Enable lock when we implement this function*/
1262    /*
1263    pthread_mutex_lock(&mMutex);
1264
1265    pthread_mutex_unlock(&mMutex);
1266    */
1267    return 0;
1268}
1269
1270/*===========================================================================
1271 * FUNCTION   : captureResultCb
1272 *
1273 * DESCRIPTION: Callback handler for all capture result
1274 *              (streams, as well as metadata)
1275 *
1276 * PARAMETERS :
1277 *   @metadata : metadata information
1278 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1279 *               NULL if metadata.
1280 *
1281 * RETURN     : NONE
1282 *==========================================================================*/
1283void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1284                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1285{
1286    pthread_mutex_lock(&mMutex);
1287
1288    if (metadata_buf) {
1289        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1290        int32_t frame_number_valid = *(int32_t *)
1291            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1292        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1293            CAM_INTF_META_PENDING_REQUESTS, metadata);
1294        uint32_t frame_number = *(uint32_t *)
1295            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1296        const struct timeval *tv = (const struct timeval *)
1297            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1298        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1299            tv->tv_usec * NSEC_PER_USEC;
1300
1301        if (!frame_number_valid) {
1302            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1303            mMetadataChannel->bufDone(metadata_buf);
1304            goto done_metadata;
1305        }
1306        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1307                frame_number, capture_time);
1308
1309        // Go through the pending requests info and send shutter/results to frameworks
1310        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1311                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1312            camera3_capture_result_t result;
1313            camera3_notify_msg_t notify_msg;
1314            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1315
1316            // Flush out all entries with less or equal frame numbers.
1317
1318            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1319            //Right now it's the same as metadata timestamp
1320
1321            //TODO: When there is metadata drop, how do we derive the timestamp of
1322            //dropped frames? For now, we fake the dropped timestamp by substracting
1323            //from the reported timestamp
1324            nsecs_t current_capture_time = capture_time -
1325                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1326
1327            // Send shutter notify to frameworks
1328            notify_msg.type = CAMERA3_MSG_SHUTTER;
1329            notify_msg.message.shutter.frame_number = i->frame_number;
1330            notify_msg.message.shutter.timestamp = current_capture_time;
1331            mCallbackOps->notify(mCallbackOps, &notify_msg);
1332            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1333                    i->frame_number, capture_time);
1334
1335            // Send empty metadata with already filled buffers for dropped metadata
1336            // and send valid metadata with already filled buffers for current metadata
1337            if (i->frame_number < frame_number) {
1338                CameraMetadata dummyMetadata;
1339                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1340                        &current_capture_time, 1);
1341                dummyMetadata.update(ANDROID_REQUEST_ID,
1342                        &(i->request_id), 1);
1343                result.result = dummyMetadata.release();
1344            } else {
1345                result.result = translateCbMetadataToResultMetadata(metadata,
1346                        current_capture_time, i->request_id);
1347                if (mIsZslMode) {
1348                   int found_metadata = 0;
1349                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1350                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1351                        j != i->buffers.end(); j++) {
1352                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1353                         //check if corresp. zsl already exists in the stored metadata list
1354                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1355                               m != mStoredMetadataList.begin(); m++) {
1356                            if (m->frame_number == frame_number) {
1357                               m->meta_buf = metadata_buf;
1358                               found_metadata = 1;
1359                               break;
1360                            }
1361                         }
1362                         if (!found_metadata) {
1363                            MetadataBufferInfo store_meta_info;
1364                            store_meta_info.meta_buf = metadata_buf;
1365                            store_meta_info.frame_number = frame_number;
1366                            mStoredMetadataList.push_back(store_meta_info);
1367                            found_metadata = 1;
1368                         }
1369                      }
1370                   }
1371                   if (!found_metadata) {
1372                       if (!i->input_buffer_present && i->blob_request) {
1373                          //livesnapshot or fallback non-zsl snapshot case
1374                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1375                                j != i->buffers.end(); j++){
1376                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1377                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1378                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1379                                 break;
1380                              }
1381                         }
1382                       } else {
1383                            //return the metadata immediately
1384                            mMetadataChannel->bufDone(metadata_buf);
1385                            free(metadata_buf);
1386                       }
1387                   }
1388               } else if (!mIsZslMode && i->blob_request) {
1389                   //If it is a blob request then send the metadata to the picture channel
1390                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1391               } else {
1392                   // Return metadata buffer
1393                   mMetadataChannel->bufDone(metadata_buf);
1394                   free(metadata_buf);
1395               }
1396
1397            }
1398            if (!result.result) {
1399                ALOGE("%s: metadata is NULL", __func__);
1400            }
1401            result.frame_number = i->frame_number;
1402            result.num_output_buffers = 0;
1403            result.output_buffers = NULL;
1404            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1405                    j != i->buffers.end(); j++) {
1406                if (j->buffer) {
1407                    result.num_output_buffers++;
1408                }
1409            }
1410
1411            if (result.num_output_buffers > 0) {
1412                camera3_stream_buffer_t *result_buffers =
1413                    new camera3_stream_buffer_t[result.num_output_buffers];
1414                if (!result_buffers) {
1415                    ALOGE("%s: Fatal error: out of memory", __func__);
1416                }
1417                size_t result_buffers_idx = 0;
1418                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1419                        j != i->buffers.end(); j++) {
1420                    if (j->buffer) {
1421                        result_buffers[result_buffers_idx++] = *(j->buffer);
1422                        free(j->buffer);
1423                        j->buffer = NULL;
1424                        mPendingBuffersMap.editValueFor(j->stream)--;
1425                    }
1426                }
1427                result.output_buffers = result_buffers;
1428
1429                mCallbackOps->process_capture_result(mCallbackOps, &result);
1430                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1431                        __func__, result.frame_number, current_capture_time);
1432                free_camera_metadata((camera_metadata_t *)result.result);
1433                delete[] result_buffers;
1434            } else {
1435                mCallbackOps->process_capture_result(mCallbackOps, &result);
1436                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1437                        __func__, result.frame_number, current_capture_time);
1438                free_camera_metadata((camera_metadata_t *)result.result);
1439            }
1440            // erase the element from the list
1441            i = mPendingRequestsList.erase(i);
1442        }
1443
1444
1445done_metadata:
1446        bool max_buffers_dequeued = false;
1447        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1448            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1449            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1450            if (queued_buffers == stream->max_buffers) {
1451                max_buffers_dequeued = true;
1452                break;
1453            }
1454        }
1455        if (!max_buffers_dequeued && !pending_requests) {
1456            // Unblock process_capture_request
1457            mPendingRequest = 0;
1458            pthread_cond_signal(&mRequestCond);
1459        }
1460    } else {
1461        // If the frame number doesn't exist in the pending request list,
1462        // directly send the buffer to the frameworks, and update pending buffers map
1463        // Otherwise, book-keep the buffer.
1464        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1465        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1466            i++;
1467        }
1468        if (i == mPendingRequestsList.end()) {
1469            // Verify all pending requests frame_numbers are greater
1470            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1471                    j != mPendingRequestsList.end(); j++) {
1472                if (j->frame_number < frame_number) {
1473                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1474                            __func__, j->frame_number, frame_number);
1475                }
1476            }
1477            camera3_capture_result_t result;
1478            result.result = NULL;
1479            result.frame_number = frame_number;
1480            result.num_output_buffers = 1;
1481            result.output_buffers = buffer;
1482            ALOGV("%s: result frame_number = %d, buffer = %p",
1483                    __func__, frame_number, buffer);
1484            mPendingBuffersMap.editValueFor(buffer->stream)--;
1485            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1486                int found = 0;
1487                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1488                      k != mStoredMetadataList.end(); k++) {
1489                    if (k->frame_number == frame_number) {
1490                        k->zsl_buf_hdl = buffer->buffer;
1491                        found = 1;
1492                        break;
1493                    }
1494                }
1495                if (!found) {
1496                   MetadataBufferInfo meta_info;
1497                   meta_info.frame_number = frame_number;
1498                   meta_info.zsl_buf_hdl = buffer->buffer;
1499                   mStoredMetadataList.push_back(meta_info);
1500                }
1501            }
1502            mCallbackOps->process_capture_result(mCallbackOps, &result);
1503        } else {
1504            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1505                    j != i->buffers.end(); j++) {
1506                if (j->stream == buffer->stream) {
1507                    if (j->buffer != NULL) {
1508                        ALOGE("%s: Error: buffer is already set", __func__);
1509                    } else {
1510                        j->buffer = (camera3_stream_buffer_t *)malloc(
1511                                sizeof(camera3_stream_buffer_t));
1512                        *(j->buffer) = *buffer;
1513                        ALOGV("%s: cache buffer %p at result frame_number %d",
1514                                __func__, buffer, frame_number);
1515                    }
1516                }
1517            }
1518        }
1519    }
1520    pthread_mutex_unlock(&mMutex);
1521    return;
1522}
1523
1524/*===========================================================================
1525 * FUNCTION   : translateCbMetadataToResultMetadata
1526 *
1527 * DESCRIPTION:
1528 *
1529 * PARAMETERS :
1530 *   @metadata : metadata information from callback
1531 *
1532 * RETURN     : camera_metadata_t*
1533 *              metadata in a format specified by fwk
1534 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    // Build a framework result-metadata packet from the backend metadata
    // buffer.  Each CAM_INTF_* entry is located with POINTER_OF() and the
    // value is copied into the matching ANDROID_* tag.  Side effect: caches
    // exposure time and ISO into mMetadataResponse for later (JPEG EXIF) use.
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Timestamp and request id come from the caller, not the metadata buffer.
    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    // NOTE(review): these are variable-length arrays (a compiler extension);
    // when numFaces == 0 they are zero-sized, but the update() calls below
    // are guarded by numFaces > 0 so the arrays are never read in that case.
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    // j walks faceRectangles (4 ints/face), k walks faceLandmarks (6 ints/face).
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        // weight = -1: rectangles carry no weight element (4 values only).
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    // Regions are reported as [x_min, y_min, x_max, y_max, weight] (5 ints).
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);

    uint8_t *ae_state =
            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);

    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    // Crop region is flattened into [left, top, width, height].
    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    // Cache exposure time in mMetadataResponse (used outside this function,
    // e.g. for EXIF) in addition to reporting it to the framework.
    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    mMetadataResponse.exposure_time = *sensorExpTime;
    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

    int64_t  *sensorFameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);

    // ISO is similarly cached in mMetadataResponse for later use.
    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
    mMetadataResponse.iso_speed = *sensorSensitivity;
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    // Face-detect mode uses HAL enum values; map back to the framework enum.
    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
        *faceDetectMode);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    // Lens shading map dimensions come from the static capability table;
    // 4 floats (one per Bayer channel) per map cell.
    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                       (float*)lensShadingMap->lens_shading,
                       4*map_width*map_height);

    //Populate CAM_INTF_META_TONEMAP_CURVES
    /* ch0 = G, ch 1 = B, ch 2 = R*/
    // Each curve is tonemap_points_cnt (in, out) float pairs, hence * 2.
    cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
        POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
    camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                       (float*)tonemap->curves[0].tonemap_points,
                       tonemap->tonemap_points_cnt * 2);

    camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                       (float*)tonemap->curves[1].tonemap_points,
                       tonemap->tonemap_points_cnt * 2);

    camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                       (float*)tonemap->curves[2].tonemap_points,
                       tonemap->tonemap_points_cnt * 2);

    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);

    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                       predColorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);

    uint8_t *blackLevelLock = (uint8_t*)
        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);

    uint8_t *sceneFlicker = (uint8_t*)
        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);


    // release() transfers ownership of the packed buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
1798
1799/*===========================================================================
1800 * FUNCTION   : convertToRegions
1801 *
1802 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1803 *
1804 * PARAMETERS :
1805 *   @rect   : cam_rect_t struct to convert
1806 *   @region : int32_t destination array
1807 *   @weight : if we are converting from cam_area_t, weight is valid
1808 *             else weight = -1
1809 *
1810 *==========================================================================*/
1811void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1812    region[0] = rect.left;
1813    region[1] = rect.top;
1814    region[2] = rect.left + rect.width;
1815    region[3] = rect.top + rect.height;
1816    if (weight > -1) {
1817        region[4] = weight;
1818    }
1819}
1820
1821/*===========================================================================
1822 * FUNCTION   : convertFromRegions
1823 *
1824 * DESCRIPTION: helper method to convert from array to cam_rect_t
1825 *
1826 * PARAMETERS :
1827 *   @rect   : cam_rect_t struct to convert
1828 *   @region : int32_t destination array
1829 *   @weight : if we are converting from cam_area_t, weight is valid
1830 *             else weight = -1
1831 *
1832 *==========================================================================*/
1833void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1834                                                   const camera_metadata_t *settings,
1835                                                   uint32_t tag){
1836    CameraMetadata frame_settings;
1837    frame_settings = settings;
1838    int32_t x_min = frame_settings.find(tag).data.i32[0];
1839    int32_t y_min = frame_settings.find(tag).data.i32[1];
1840    int32_t x_max = frame_settings.find(tag).data.i32[2];
1841    int32_t y_max = frame_settings.find(tag).data.i32[3];
1842    roi->weight = frame_settings.find(tag).data.i32[4];
1843    roi->rect.left = x_min;
1844    roi->rect.top = y_min;
1845    roi->rect.width = x_max - x_min;
1846    roi->rect.height = y_max - y_min;
1847}
1848
1849/*===========================================================================
1850 * FUNCTION   : resetIfNeededROI
1851 *
1852 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1853 *              crop region
1854 *
1855 * PARAMETERS :
1856 *   @roi       : cam_area_t struct to resize
1857 *   @scalerCropRegion : cam_crop_region_t region to compare against
1858 *
1859 *
1860 *==========================================================================*/
1861bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1862                                                 const cam_crop_region_t* scalerCropRegion)
1863{
1864    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1865    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1866    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1867    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1868    if ((roi_x_max < scalerCropRegion->left) ||
1869        (roi_y_max < scalerCropRegion->top)  ||
1870        (roi->rect.left > crop_x_max) ||
1871        (roi->rect.top > crop_y_max)){
1872        return false;
1873    }
1874    if (roi->rect.left < scalerCropRegion->left) {
1875        roi->rect.left = scalerCropRegion->left;
1876    }
1877    if (roi->rect.top < scalerCropRegion->top) {
1878        roi->rect.top = scalerCropRegion->top;
1879    }
1880    if (roi_x_max > crop_x_max) {
1881        roi_x_max = crop_x_max;
1882    }
1883    if (roi_y_max > crop_y_max) {
1884        roi_y_max = crop_y_max;
1885    }
1886    roi->rect.width = roi_x_max - roi->rect.left;
1887    roi->rect.height = roi_y_max - roi->rect.top;
1888    return true;
1889}
1890
1891/*===========================================================================
1892 * FUNCTION   : convertLandmarks
1893 *
1894 * DESCRIPTION: helper method to extract the landmarks from face detection info
1895 *
1896 * PARAMETERS :
1897 *   @face   : cam_rect_t struct to convert
1898 *   @landmarks : int32_t destination array
1899 *
1900 *
1901 *==========================================================================*/
1902void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1903{
1904    landmarks[0] = face.left_eye_center.x;
1905    landmarks[1] = face.left_eye_center.y;
1906    landmarks[2] = face.right_eye_center.y;
1907    landmarks[3] = face.right_eye_center.y;
1908    landmarks[4] = face.mouth_center.x;
1909    landmarks[5] = face.mouth_center.y;
1910}
1911
1912#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1913/*===========================================================================
1914 * FUNCTION   : initCapabilities
1915 *
1916 * DESCRIPTION: initialize camera capabilities in static data struct
1917 *
1918 * PARAMETERS :
1919 *   @cameraId  : camera Id
1920 *
1921 * RETURN     : int32_t type of status
1922 *              NO_ERROR  -- success
1923 *              none-zero failure code
1924 *==========================================================================*/
1925int QCamera3HardwareInterface::initCapabilities(int cameraId)
1926{
1927    int rc = 0;
1928    mm_camera_vtbl_t *cameraHandle = NULL;
1929    QCamera3HeapMemory *capabilityHeap = NULL;
1930
1931    cameraHandle = camera_open(cameraId);
1932    if (!cameraHandle) {
1933        ALOGE("%s: camera_open failed", __func__);
1934        rc = -1;
1935        goto open_failed;
1936    }
1937
1938    capabilityHeap = new QCamera3HeapMemory();
1939    if (capabilityHeap == NULL) {
1940        ALOGE("%s: creation of capabilityHeap failed", __func__);
1941        goto heap_creation_failed;
1942    }
1943    /* Allocate memory for capability buffer */
1944    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1945    if(rc != OK) {
1946        ALOGE("%s: No memory for cappability", __func__);
1947        goto allocate_failed;
1948    }
1949
1950    /* Map memory for capability buffer */
1951    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1952    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1953                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1954                                capabilityHeap->getFd(0),
1955                                sizeof(cam_capability_t));
1956    if(rc < 0) {
1957        ALOGE("%s: failed to map capability buffer", __func__);
1958        goto map_failed;
1959    }
1960
1961    /* Query Capability */
1962    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1963    if(rc < 0) {
1964        ALOGE("%s: failed to query capability",__func__);
1965        goto query_failed;
1966    }
1967    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1968    if (!gCamCapability[cameraId]) {
1969        ALOGE("%s: out of memory", __func__);
1970        goto query_failed;
1971    }
1972    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1973                                        sizeof(cam_capability_t));
1974    rc = 0;
1975
1976query_failed:
1977    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1978                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1979map_failed:
1980    capabilityHeap->deallocate();
1981allocate_failed:
1982    delete capabilityHeap;
1983heap_creation_failed:
1984    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1985    cameraHandle = NULL;
1986open_failed:
1987    return rc;
1988}
1989
1990/*===========================================================================
1991 * FUNCTION   : initParameters
1992 *
1993 * DESCRIPTION: initialize camera parameters
1994 *
1995 * PARAMETERS :
1996 *
1997 * RETURN     : int32_t type of status
1998 *              NO_ERROR  -- success
1999 *              none-zero failure code
2000 *==========================================================================*/
2001int QCamera3HardwareInterface::initParameters()
2002{
2003    int rc = 0;
2004
2005    //Allocate Set Param Buffer
2006    mParamHeap = new QCamera3HeapMemory();
2007    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2008    if(rc != OK) {
2009        rc = NO_MEMORY;
2010        ALOGE("Failed to allocate SETPARM Heap memory");
2011        delete mParamHeap;
2012        mParamHeap = NULL;
2013        return rc;
2014    }
2015
2016    //Map memory for parameters buffer
2017    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2018            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2019            mParamHeap->getFd(0),
2020            sizeof(parm_buffer_t));
2021    if(rc < 0) {
2022        ALOGE("%s:failed to map SETPARM buffer",__func__);
2023        rc = FAILED_TRANSACTION;
2024        mParamHeap->deallocate();
2025        delete mParamHeap;
2026        mParamHeap = NULL;
2027        return rc;
2028    }
2029
2030    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2031    return rc;
2032}
2033
2034/*===========================================================================
2035 * FUNCTION   : deinitParameters
2036 *
2037 * DESCRIPTION: de-initialize camera parameters
2038 *
2039 * PARAMETERS :
2040 *
2041 * RETURN     : NONE
2042 *==========================================================================*/
2043void QCamera3HardwareInterface::deinitParameters()
2044{
2045    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2046            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2047
2048    mParamHeap->deallocate();
2049    delete mParamHeap;
2050    mParamHeap = NULL;
2051
2052    mParameters = NULL;
2053}
2054
2055/*===========================================================================
2056 * FUNCTION   : calcMaxJpegSize
2057 *
2058 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2059 *
2060 * PARAMETERS :
2061 *
2062 * RETURN     : max_jpeg_size
2063 *==========================================================================*/
2064int QCamera3HardwareInterface::calcMaxJpegSize()
2065{
2066    int32_t max_jpeg_size = 0;
2067    int temp_width, temp_height;
2068    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2069        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2070        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2071        if (temp_width * temp_height > max_jpeg_size ) {
2072            max_jpeg_size = temp_width * temp_height;
2073        }
2074    }
2075    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2076    return max_jpeg_size;
2077}
2078
2079/*===========================================================================
2080 * FUNCTION   : initStaticMetadata
2081 *
2082 * DESCRIPTION: initialize the static metadata
2083 *
2084 * PARAMETERS :
2085 *   @cameraId  : camera Id
2086 *
2087 * RETURN     : int32_t type of status
2088 *              0  -- success
2089 *              non-zero failure code
2090 *==========================================================================*/
2091int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2092{
2093    int rc = 0;
2094    CameraMetadata staticInfo;
2095
2096    /* android.info: hardware level */
2097    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2098    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2099        &supportedHardwareLevel, 1);
2100
2101    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2102    /*HAL 3 only*/
2103    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2104                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2105
2106    /*hard coded for now but this should come from sensor*/
2107    float min_focus_distance;
2108    if(facingBack){
2109        min_focus_distance = 10;
2110    } else {
2111        min_focus_distance = 0;
2112    }
2113    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2114                    &min_focus_distance, 1);
2115
2116    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2117                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2118
2119    /*should be using focal lengths but sensor doesn't provide that info now*/
2120    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2121                      &gCamCapability[cameraId]->focal_length,
2122                      1);
2123
2124    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2125                      gCamCapability[cameraId]->apertures,
2126                      gCamCapability[cameraId]->apertures_count);
2127
2128    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2129                gCamCapability[cameraId]->filter_densities,
2130                gCamCapability[cameraId]->filter_densities_count);
2131
2132
2133    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2134                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2135                      gCamCapability[cameraId]->optical_stab_modes_count);
2136
2137    staticInfo.update(ANDROID_LENS_POSITION,
2138                      gCamCapability[cameraId]->lens_position,
2139                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2140
2141    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2142                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2143    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2144                      lens_shading_map_size,
2145                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2146
2147    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2148                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2149    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2150            geo_correction_map_size,
2151            sizeof(geo_correction_map_size)/sizeof(int32_t));
2152
2153    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2154                       gCamCapability[cameraId]->geo_correction_map,
2155                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2156
2157    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2158            gCamCapability[cameraId]->sensor_physical_size, 2);
2159
2160    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2161            gCamCapability[cameraId]->exposure_time_range, 2);
2162
2163    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2164            &gCamCapability[cameraId]->max_frame_duration, 1);
2165
2166
2167    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2168                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2169
2170    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2171                                               gCamCapability[cameraId]->pixel_array_size.height};
2172    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2173                      pixel_array_size, 2);
2174
2175    int32_t active_array_size[] = {0, 0,
2176                                                gCamCapability[cameraId]->active_array_size.width,
2177                                                gCamCapability[cameraId]->active_array_size.height};
2178    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2179                      active_array_size, 4);
2180
2181    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2182            &gCamCapability[cameraId]->white_level, 1);
2183
2184    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2185            gCamCapability[cameraId]->black_level_pattern, 4);
2186
2187    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2188                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2189
2190    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2191                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2192
2193    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2194                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2195
2196    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2197                      &gCamCapability[cameraId]->histogram_size, 1);
2198
2199    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2200            &gCamCapability[cameraId]->max_histogram_count, 1);
2201
2202    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2203                                                gCamCapability[cameraId]->sharpness_map_size.height};
2204
2205    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2206            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2207
2208    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2209            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2210
2211
2212    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2213                      &gCamCapability[cameraId]->raw_min_duration,
2214                       1);
2215
2216    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2217                                                HAL_PIXEL_FORMAT_BLOB};
2218    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2219    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2220                      scalar_formats,
2221                      scalar_formats_count);
2222
2223    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2224    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2225              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2226              available_processed_sizes);
2227    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2228                available_processed_sizes,
2229                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2230
2231    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2232                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2233                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2234
2235    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2236    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2237                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2238                 available_fps_ranges);
2239    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2240            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2241
2242    camera_metadata_rational exposureCompensationStep = {
2243            gCamCapability[cameraId]->exp_compensation_step.numerator,
2244            gCamCapability[cameraId]->exp_compensation_step.denominator};
2245    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2246                      &exposureCompensationStep, 1);
2247
2248    /*TO DO*/
2249    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2250    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2251                      availableVstabModes, sizeof(availableVstabModes));
2252
2253    /*HAL 1 and HAL 3 common*/
2254    float maxZoom = 4;
2255    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2256            &maxZoom, 1);
2257
2258    int32_t max3aRegions = 1;
2259    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2260            &max3aRegions, 1);
2261
2262    uint8_t availableFaceDetectModes[] = {
2263            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2264            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2265    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2266                      availableFaceDetectModes,
2267                      sizeof(availableFaceDetectModes));
2268
2269    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2270                                       gCamCapability[cameraId]->raw_dim.height};
2271    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2272                      raw_size,
2273                      sizeof(raw_size)/sizeof(uint32_t));
2274
2275    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2276                                                        gCamCapability[cameraId]->exposure_compensation_max};
2277    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2278            exposureCompensationRange,
2279            sizeof(exposureCompensationRange)/sizeof(int32_t));
2280
2281    uint8_t lensFacing = (facingBack) ?
2282            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2283    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2284
2285    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2286                available_processed_sizes,
2287                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2288
2289    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2290                      available_thumbnail_sizes,
2291                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2292
2293    int32_t max_jpeg_size = 0;
2294    int temp_width, temp_height;
2295    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2296        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2297        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2298        if (temp_width * temp_height > max_jpeg_size ) {
2299            max_jpeg_size = temp_width * temp_height;
2300        }
2301    }
2302    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2303    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2304                      &max_jpeg_size, 1);
2305
2306    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2307    int32_t size = 0;
2308    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2309        int val = lookupFwkName(EFFECT_MODES_MAP,
2310                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2311                                   gCamCapability[cameraId]->supported_effects[i]);
2312        if (val != NAME_NOT_FOUND) {
2313            avail_effects[size] = (uint8_t)val;
2314            size++;
2315        }
2316    }
2317    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2318                      avail_effects,
2319                      size);
2320
2321    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2322    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2323    int32_t supported_scene_modes_cnt = 0;
2324    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2325        int val = lookupFwkName(SCENE_MODES_MAP,
2326                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2327                                gCamCapability[cameraId]->supported_scene_modes[i]);
2328        if (val != NAME_NOT_FOUND) {
2329            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2330            supported_indexes[supported_scene_modes_cnt] = i;
2331            supported_scene_modes_cnt++;
2332        }
2333    }
2334
2335    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2336                      avail_scene_modes,
2337                      supported_scene_modes_cnt);
2338
2339    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2340    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2341                      supported_scene_modes_cnt,
2342                      scene_mode_overrides,
2343                      supported_indexes,
2344                      cameraId);
2345    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2346                      scene_mode_overrides,
2347                      supported_scene_modes_cnt*3);
2348
2349    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2350    size = 0;
2351    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2352        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2353                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2354                                 gCamCapability[cameraId]->supported_antibandings[i]);
2355        if (val != NAME_NOT_FOUND) {
2356            avail_antibanding_modes[size] = (uint8_t)val;
2357            size++;
2358        }
2359
2360    }
2361    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2362                      avail_antibanding_modes,
2363                      size);
2364
2365    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2366    size = 0;
2367    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2368        int val = lookupFwkName(FOCUS_MODES_MAP,
2369                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2370                                gCamCapability[cameraId]->supported_focus_modes[i]);
2371        if (val != NAME_NOT_FOUND) {
2372            avail_af_modes[size] = (uint8_t)val;
2373            size++;
2374        }
2375    }
2376    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2377                      avail_af_modes,
2378                      size);
2379
2380    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2381    size = 0;
2382    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2383        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2384                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2385                                    gCamCapability[cameraId]->supported_white_balances[i]);
2386        if (val != NAME_NOT_FOUND) {
2387            avail_awb_modes[size] = (uint8_t)val;
2388            size++;
2389        }
2390    }
2391    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2392                      avail_awb_modes,
2393                      size);
2394
2395    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2396    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2397      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2398
2399    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2400            available_flash_levels,
2401            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2402
2403
2404    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2405    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2406            &flashAvailable, 1);
2407
2408    uint8_t avail_ae_modes[5];
2409    size = 0;
2410    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2411        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2412        size++;
2413    }
2414    if (flashAvailable) {
2415        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2416        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2417        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2418    }
2419    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2420                      avail_ae_modes,
2421                      size);
2422
2423    int32_t sensitivity_range[2];
2424    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2425    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2426    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2427                      sensitivity_range,
2428                      sizeof(sensitivity_range) / sizeof(int32_t));
2429
2430    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2431                      &gCamCapability[cameraId]->max_analog_sensitivity,
2432                      1);
2433
2434    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2435                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2436                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2437
2438    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2439    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2440                      &sensor_orientation,
2441                      1);
2442
2443    int32_t max_output_streams[3] = {1, 3, 1};
2444    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2445                      max_output_streams,
2446                      3);
2447
2448    gStaticMetadata[cameraId] = staticInfo.release();
2449    return rc;
2450}
2451
2452/*===========================================================================
2453 * FUNCTION   : makeTable
2454 *
2455 * DESCRIPTION: make a table of sizes
2456 *
2457 * PARAMETERS :
2458 *
2459 *
2460 *==========================================================================*/
2461void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2462                                          int32_t* sizeTable)
2463{
2464    int j = 0;
2465    for (int i = 0; i < size; i++) {
2466        sizeTable[j] = dimTable[i].width;
2467        sizeTable[j+1] = dimTable[i].height;
2468        j+=2;
2469    }
2470}
2471
2472/*===========================================================================
2473 * FUNCTION   : makeFPSTable
2474 *
2475 * DESCRIPTION: make a table of fps ranges
2476 *
2477 * PARAMETERS :
2478 *
2479 *==========================================================================*/
2480void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2481                                          int32_t* fpsRangesTable)
2482{
2483    int j = 0;
2484    for (int i = 0; i < size; i++) {
2485        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2486        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2487        j+=2;
2488    }
2489}
2490
2491/*===========================================================================
2492 * FUNCTION   : makeOverridesList
2493 *
2494 * DESCRIPTION: make a list of scene mode overrides
2495 *
2496 * PARAMETERS :
2497 *
2498 *
2499 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
                                                  uint8_t size, uint8_t* overridesList,
                                                  uint8_t* supported_indexes,
                                                  int camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Output layout per scene mode: { aeMode, awbMode, afMode } — three
    // bytes per entry, size entries total.
    int j = 0, index = 0, supt = 0;
    uint8_t focus_override;
    for (int i = 0; i < size; i++) {
        supt = 0;
        // Map the i-th framework-supported scene mode back to its slot in
        // the full backend overrides table.
        index = supported_indexes[i];
        // AE override: auto-flash when the unit has a flash, plain ON otherwise.
        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the backend enum to the framework enum.
        // NOTE(review): a failed lookup (NAME_NOT_FOUND) is cast to uint8_t
        // unchecked — confirm all backend awb modes are present in the map.
        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                                    overridesTable[index].awb_mode);
        focus_override = (uint8_t)overridesTable[index].af_mode;
        // Only advertise the AF override if the sensor actually supports
        // that focus mode; otherwise fall back to AF_MODE_OFF.
        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = 1;
              break;
           }
        }
        if (supt) {
           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                              focus_override);
        } else {
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
2534
2535/*===========================================================================
2536 * FUNCTION   : getPreviewHalPixelFormat
2537 *
2538 * DESCRIPTION: convert the format to type recognized by framework
2539 *
2540 * PARAMETERS : format : the format from backend
2541 *
2542 ** RETURN    : format recognized by framework
2543 *
2544 *==========================================================================*/
2545int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2546{
2547    int32_t halPixelFormat;
2548
2549    switch (format) {
2550    case CAM_FORMAT_YUV_420_NV12:
2551        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2552        break;
2553    case CAM_FORMAT_YUV_420_NV21:
2554        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2555        break;
2556    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2557        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2558        break;
2559    case CAM_FORMAT_YUV_420_YV12:
2560        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2561        break;
2562    case CAM_FORMAT_YUV_422_NV16:
2563    case CAM_FORMAT_YUV_422_NV61:
2564    default:
2565        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2566        break;
2567    }
2568    return halPixelFormat;
2569}
2570
2571/*===========================================================================
2572 * FUNCTION   : getSensorSensitivity
2573 *
2574 * DESCRIPTION: convert iso_mode to an integer value
2575 *
2576 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2577 *
2578 ** RETURN    : sensitivity supported by sensor
2579 *
2580 *==========================================================================*/
2581int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2582{
2583    int32_t sensitivity;
2584
2585    switch (iso_mode) {
2586    case CAM_ISO_MODE_100:
2587        sensitivity = 100;
2588        break;
2589    case CAM_ISO_MODE_200:
2590        sensitivity = 200;
2591        break;
2592    case CAM_ISO_MODE_400:
2593        sensitivity = 400;
2594        break;
2595    case CAM_ISO_MODE_800:
2596        sensitivity = 800;
2597        break;
2598    case CAM_ISO_MODE_1600:
2599        sensitivity = 1600;
2600        break;
2601    default:
2602        sensitivity = -1;
2603        break;
2604    }
2605    return sensitivity;
2606}
2607
2608
2609/*===========================================================================
2610 * FUNCTION   : AddSetParmEntryToBatch
2611 *
2612 * DESCRIPTION: add set parameter entry into batch
2613 *
2614 * PARAMETERS :
2615 *   @p_table     : ptr to parameter buffer
2616 *   @paramType   : parameter type
2617 *   @paramLength : length of parameter value
2618 *   @paramValue  : ptr to parameter value
2619 *
2620 * RETURN     : int32_t type of status
2621 *              NO_ERROR  -- success
2622 *              none-zero failure code
2623 *==========================================================================*/
2624int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2625                                                          cam_intf_parm_type_t paramType,
2626                                                          uint32_t paramLength,
2627                                                          void *paramValue)
2628{
2629    int position = paramType;
2630    int current, next;
2631
2632    /*************************************************************************
2633    *                 Code to take care of linking next flags                *
2634    *************************************************************************/
2635    current = GET_FIRST_PARAM_ID(p_table);
2636    if (position == current){
2637        //DO NOTHING
2638    } else if (position < current){
2639        SET_NEXT_PARAM_ID(position, p_table, current);
2640        SET_FIRST_PARAM_ID(p_table, position);
2641    } else {
2642        /* Search for the position in the linked list where we need to slot in*/
2643        while (position > GET_NEXT_PARAM_ID(current, p_table))
2644            current = GET_NEXT_PARAM_ID(current, p_table);
2645
2646        /*If node already exists no need to alter linking*/
2647        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2648            next = GET_NEXT_PARAM_ID(current, p_table);
2649            SET_NEXT_PARAM_ID(current, p_table, position);
2650            SET_NEXT_PARAM_ID(position, p_table, next);
2651        }
2652    }
2653
2654    /*************************************************************************
2655    *                   Copy contents into entry                             *
2656    *************************************************************************/
2657
2658    if (paramLength > sizeof(parm_type_t)) {
2659        ALOGE("%s:Size of input larger than max entry size",__func__);
2660        return BAD_VALUE;
2661    }
2662    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2663    return NO_ERROR;
2664}
2665
2666/*===========================================================================
2667 * FUNCTION   : lookupFwkName
2668 *
2669 * DESCRIPTION: In case the enum is not same in fwk and backend
2670 *              make sure the parameter is correctly propogated
2671 *
2672 * PARAMETERS  :
2673 *   @arr      : map between the two enums
2674 *   @len      : len of the map
2675 *   @hal_name : name of the hal_parm to map
2676 *
2677 * RETURN     : int type of status
2678 *              fwk_name  -- success
2679 *              none-zero failure code
2680 *==========================================================================*/
2681int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2682                                             int len, int hal_name)
2683{
2684
2685    for (int i = 0; i < len; i++) {
2686        if (arr[i].hal_name == hal_name)
2687            return arr[i].fwk_name;
2688    }
2689
2690    /* Not able to find matching framework type is not necessarily
2691     * an error case. This happens when mm-camera supports more attributes
2692     * than the frameworks do */
2693    ALOGD("%s: Cannot find matching framework type", __func__);
2694    return NAME_NOT_FOUND;
2695}
2696
2697/*===========================================================================
2698 * FUNCTION   : lookupHalName
2699 *
2700 * DESCRIPTION: In case the enum is not same in fwk and backend
2701 *              make sure the parameter is correctly propogated
2702 *
2703 * PARAMETERS  :
2704 *   @arr      : map between the two enums
2705 *   @len      : len of the map
2706 *   @fwk_name : name of the hal_parm to map
2707 *
2708 * RETURN     : int32_t type of status
2709 *              hal_name  -- success
2710 *              none-zero failure code
2711 *==========================================================================*/
2712int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2713                                             int len, int fwk_name)
2714{
2715    for (int i = 0; i < len; i++) {
2716       if (arr[i].fwk_name == fwk_name)
2717           return arr[i].hal_name;
2718    }
2719    ALOGE("%s: Cannot find matching hal type", __func__);
2720    return NAME_NOT_FOUND;
2721}
2722
2723/*===========================================================================
2724 * FUNCTION   : getCapabilities
2725 *
2726 * DESCRIPTION: query camera capabilities
2727 *
2728 * PARAMETERS :
2729 *   @cameraId  : camera Id
2730 *   @info      : camera info struct to be filled in with camera capabilities
2731 *
2732 * RETURN     : int32_t type of status
2733 *              NO_ERROR  -- success
2734 *              none-zero failure code
2735 *==========================================================================*/
2736int QCamera3HardwareInterface::getCamInfo(int cameraId,
2737                                    struct camera_info *info)
2738{
2739    int rc = 0;
2740
2741    if (NULL == gCamCapability[cameraId]) {
2742        rc = initCapabilities(cameraId);
2743        if (rc < 0) {
2744            //pthread_mutex_unlock(&g_camlock);
2745            return rc;
2746        }
2747    }
2748
2749    if (NULL == gStaticMetadata[cameraId]) {
2750        rc = initStaticMetadata(cameraId);
2751        if (rc < 0) {
2752            return rc;
2753        }
2754    }
2755
2756    switch(gCamCapability[cameraId]->position) {
2757    case CAM_POSITION_BACK:
2758        info->facing = CAMERA_FACING_BACK;
2759        break;
2760
2761    case CAM_POSITION_FRONT:
2762        info->facing = CAMERA_FACING_FRONT;
2763        break;
2764
2765    default:
2766        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2767        rc = -1;
2768        break;
2769    }
2770
2771
2772    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2773    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2774    info->static_camera_characteristics = gStaticMetadata[cameraId];
2775
2776    return rc;
2777}
2778
2779/*===========================================================================
2780 * FUNCTION   : translateMetadata
2781 *
2782 * DESCRIPTION: translate the metadata into camera_metadata_t
2783 *
2784 * PARAMETERS : type of the request
2785 *
2786 *
2787 * RETURN     : success: camera_metadata_t*
2788 *              failure: NULL
2789 *
2790 *==========================================================================*/
2791camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2792{
2793    pthread_mutex_lock(&mMutex);
2794
2795    if (mDefaultMetadata[type] != NULL) {
2796        pthread_mutex_unlock(&mMutex);
2797        return mDefaultMetadata[type];
2798    }
2799    //first time we are handling this request
2800    //fill up the metadata structure using the wrapper class
2801    CameraMetadata settings;
2802    //translate from cam_capability_t to camera_metadata_tag_t
2803    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2804    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2805
2806    /*control*/
2807
2808    uint8_t controlIntent = 0;
2809    switch (type) {
2810      case CAMERA3_TEMPLATE_PREVIEW:
2811        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2812        break;
2813      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2814        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2815        break;
2816      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2817        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2818        break;
2819      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2820        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2821        break;
2822      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2823        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2824        break;
2825      default:
2826        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2827        break;
2828    }
2829    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2830
2831    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2832            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2833
2834    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2835    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2836
2837    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2838    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2839
2840    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2841    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2842
2843    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2844    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2845
2846    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2847    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2848
2849    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2850    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2851
2852    static uint8_t focusMode;
2853    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2854        ALOGE("%s: Setting focus mode to auto", __func__);
2855        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2856    } else {
2857        ALOGE("%s: Setting focus mode to off", __func__);
2858        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2859    }
2860    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2861
2862    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2863    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2864
2865    /*flash*/
2866    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2867    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2868
2869    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2870    settings.update(ANDROID_FLASH_FIRING_POWER,
2871            &flashFiringLevel, 1);
2872
2873    /* lens */
2874    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2875    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2876
2877    if (gCamCapability[mCameraId]->filter_densities_count) {
2878        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2879        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2880                        gCamCapability[mCameraId]->filter_densities_count);
2881    }
2882
2883    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2884    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2885
2886    /* frame duration */
2887    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2888    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2889
2890    /* sensitivity */
2891    static const int32_t default_sensitivity = 100;
2892    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2893
2894    /*edge mode*/
2895    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2896    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
2897
2898    /*noise reduction mode*/
2899    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
2900    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
2901
2902    /*color correction mode*/
2903    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
2904    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
2905
2906    /*transform matrix mode*/
2907    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
2908    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
2909
2910    mDefaultMetadata[type] = settings.release();
2911
2912    pthread_mutex_unlock(&mMutex);
2913    return mDefaultMetadata[type];
2914}
2915
2916/*===========================================================================
2917 * FUNCTION   : setFrameParameters
2918 *
2919 * DESCRIPTION: set parameters per frame as requested in the metadata from
2920 *              framework
2921 *
2922 * PARAMETERS :
2923 *   @request   : request that needs to be serviced
2924 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2925 *
2926 * RETURN     : success: NO_ERROR
2927 *              failure:
2928 *==========================================================================*/
2929int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2930                    uint32_t streamTypeMask)
2931{
2932    /*translate from camera_metadata_t type to parm_type_t*/
2933    int rc = 0;
2934    if (request->settings == NULL && mFirstRequest) {
2935        /*settings cannot be null for the first request*/
2936        return BAD_VALUE;
2937    }
2938
2939    int32_t hal_version = CAM_HAL_V3;
2940
2941    memset(mParameters, 0, sizeof(parm_buffer_t));
2942    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2943    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2944                sizeof(hal_version), &hal_version);
2945    if (rc < 0) {
2946        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2947        return BAD_VALUE;
2948    }
2949
2950    /*we need to update the frame number in the parameters*/
2951    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2952                                sizeof(request->frame_number), &(request->frame_number));
2953    if (rc < 0) {
2954        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2955        return BAD_VALUE;
2956    }
2957
2958    /* Update stream id mask where buffers are requested */
2959    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2960                                sizeof(streamTypeMask), &streamTypeMask);
2961    if (rc < 0) {
2962        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2963        return BAD_VALUE;
2964    }
2965
2966    if(request->settings != NULL){
2967        rc = translateMetadataToParameters(request);
2968    }
2969    /*set the parameters to backend*/
2970    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2971    return rc;
2972}
2973
2974/*===========================================================================
2975 * FUNCTION   : translateMetadataToParameters
2976 *
2977 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2978 *
2979 *
2980 * PARAMETERS :
2981 *   @request  : request sent from framework
2982 *
2983 *
2984 * RETURN     : success: NO_ERROR
 *              failure: non-zero error code (e.g. BAD_VALUE)
2986 *==========================================================================*/
int QCamera3HardwareInterface::translateMetadataToParameters
                                  (const camera3_capture_request_t *request)
{
    int rc = 0;
    /* NOTE(review): rc is overwritten by every AddSetParmEntryToBatch call
     * below, so only the failure of the last attempted entry is reported to
     * the caller. Earlier failures are silently lost — confirm intended. */
    CameraMetadata frame_settings;
    frame_settings = request->settings;

    /* Antibanding: framework enum is passed through untranslated. */
    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
        int32_t antibandingMode =
            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
                sizeof(antibandingMode), &antibandingMode);
    }

    /* Exposure compensation, clamped to this sensor's advertised range. */
    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
        int32_t expCompensation = frame_settings.find(
            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
          sizeof(expCompensation), &expCompensation);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
                sizeof(aeLock), &aeLock);
    }
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        cam_fps_range_t fps_range;
        fps_range.min_fps =
            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
        fps_range.max_fps =
            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
                sizeof(fps_range), &fps_range);
    }

    /* focalDistance stays in scope past this block: the AF-mode translation
     * below uses it to detect the "manual focus at infinity" combination. */
    float focalDistance = -1.0;
    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_LENS_FOCUS_DISTANCE,
                sizeof(focalDistance), &focalDistance);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
        uint8_t fwk_focusMode =
            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
        uint8_t focusMode;
        /* AF OFF with focus distance 0 means the framework asked for focus
         * at infinity, which the HAL models as a dedicated focus mode. */
        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
            focusMode = CAM_FOCUS_MODE_INFINITY;
        } else{
         focusMode = lookupHalName(FOCUS_MODES_MAP,
                                   sizeof(FOCUS_MODES_MAP),
                                   fwk_focusMode);
        }
        /* NOTE(review): most lookupHalName call sites here pass sizeof(MAP)
         * (bytes) as the length, but the scene-mode lookup below passes an
         * entry count (sizeof/sizeof[0]) — confirm which unit lookupHalName
         * expects; one of the two styles is likely over/under-scanning. */
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
                sizeof(focusMode), &focusMode);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock =
            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
                sizeof(awbLock), &awbLock);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
        uint8_t fwk_whiteLevel =
            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
                sizeof(WHITE_BALANCE_MODES_MAP),
                fwk_whiteLevel);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
                sizeof(whiteLevel), &whiteLevel);
    }

    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode =
            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
                sizeof(EFFECT_MODES_MAP),
                fwk_effectMode);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
                sizeof(effectMode), &effectMode);
    }

    /* AE mode fans out into three backend parameters: AEC on/off, LED/flash
     * mode, and red-eye reduction. */
    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
        uint8_t fwk_aeMode =
            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
        uint8_t aeMode;
        int32_t redeye;

        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
            aeMode = CAM_AE_MODE_OFF;
        } else {
            aeMode = CAM_AE_MODE_ON;
        }
        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
            redeye = 1;
        } else {
            redeye = 0;
        }

        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
                                          sizeof(AE_FLASH_MODE_MAP),
                                          fwk_aeMode);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
                sizeof(aeMode), &aeMode);
        /* LED mode is written here as a 4-byte int32_t; see the
         * ANDROID_FLASH_MODE block below, which writes the same parameter
         * as a 1-byte uint8_t. */
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
                sizeof(flashMode), &flashMode);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
                sizeof(redeye), &redeye);
    }

    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode =
            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        rc =
            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
                    sizeof(colorCorrectMode), &colorCorrectMode);
    }

    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        cam_color_correct_gains_t colorCorrectGains;
        /* Framework order is 4 Bayer-channel gains; copied positionally. */
        for (int i = 0; i < 4; i++) {
            colorCorrectGains.gains[i] =
                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        rc =
            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
                    sizeof(colorCorrectGains), &colorCorrectGains);
    }

    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        int num = 0;
        /* Unpack the flat 9-element rational array into a 3x3 matrix,
         * row-major (num walks 0..8). */
        for (int i = 0; i < 3; i++) {
           for (int j = 0; j < 3; j++) {
              transform_elem.numerator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
              transform_elem.denominator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
              num++;
           }
        }
        rc =
            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                    sizeof(colorCorrectTransform), &colorCorrectTransform);
    }

    /* The AEC precapture trigger is ALWAYS sent: defaults (IDLE, id -1)
     * are used when the request does not carry both trigger and id. */
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
    }
    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                                sizeof(aecTrigger), &aecTrigger);

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
    }

    /* Control mode drives the bestshot (scene) selection: USE_SCENE_MODE
     * maps the framework scene through SCENE_MODES_MAP, while OFF and AUTO
     * both force bestshot off. */
    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
                sizeof(metaMode), &metaMode);
        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
           /* NOTE(review): assumes ANDROID_CONTROL_SCENE_MODE is present
            * whenever USE_SCENE_MODE is requested — no exists() check. */
           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                             fwk_sceneMode);
           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                sizeof(sceneMode), &sceneMode);
        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                sizeof(sceneMode), &sceneMode);
        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                sizeof(sceneMode), &sceneMode);
        }
    }

    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        /* u8 metadata widened to int32_t; 4 bytes are sent to the backend —
         * presumably the backend expects a 32-bit value here (TODO confirm). */
        int32_t demosaic =
            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
                sizeof(demosaic), &demosaic);
    }

    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
        /* Sharpness is hard-coded: 0 when edge enhancement is off, 10
         * otherwise (same default used by getJpegSettings). */
        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            edge_application.sharpness = 10;
        }
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
                sizeof(edge_application), &edge_application);
    }

    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
        int32_t edgeStrength =
            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
    }

    /* android.flash.mode is only honored when AE mode is OFF or ON; any
     * auto-flash AE mode owns the flash and the explicit mode is ignored. */
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
                respectFlashMode = 0;
                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
                    __func__);
            }
        }
        if (respectFlashMode) {
            uint8_t flashMode =
                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
            /* NOTE(review): the int32_t cast result is narrowed back into a
             * uint8_t, and only 1 byte is sent for CAM_INTF_PARM_LED_MODE —
             * the AE-mode block above sends 4 bytes for the same parameter.
             * Confirm the backend's expected width. */
            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
                                          sizeof(FLASH_MODES_MAP),
                                          flashMode);
            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
            // To check: CAM_INTF_META_FLASH_MODE usage
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
                          sizeof(flashMode), &flashMode);
        }
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower =
            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
                sizeof(flashPower), &flashPower);
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime =
            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
    }

    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
        uint8_t geometricMode =
            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
                sizeof(geometricMode), &geometricMode);
    }

    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
        uint8_t geometricStrength =
            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_GEOMETRIC_STRENGTH,
                sizeof(geometricStrength), &geometricStrength);
    }

    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode =
            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
                sizeof(hotPixelMode), &hotPixelMode);
    }

    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture =
            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
                sizeof(lensAperture), &lensAperture);
    }

    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity =
            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
                sizeof(filterDensity), &filterDensity);
    }

    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength =
            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_LENS_FOCAL_LENGTH,
                sizeof(focalLength), &focalLength);
    }

    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_LENS_OPT_STAB_MODE,
                sizeof(optStabMode), &optStabMode);
    }

    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode =
            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_NOISE_REDUCTION_MODE,
                sizeof(noiseRedMode), &noiseRedMode);
    }

    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
        uint8_t noiseRedStrength =
            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
                sizeof(noiseRedStrength), &noiseRedStrength);
    }

    /* The crop region (and the flag that it was set) persists past this
     * block: the AE/AF/AWB ROI blocks at the end use it to reset ROIs that
     * fall outside the crop. Metadata layout is [left, top, width, height]. */
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SCALER_CROP_REGION,
                sizeof(scalerCropRegion), &scalerCropRegion);
        scalerCropSet = true;
    }

    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sizeof(sensorExpTime), &sensorExpTime);
    }

    /* Frame duration is clamped between the per-stream minimum and the
     * sensor's maximum supported duration. */
    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        int64_t minFrameDuration = getMinFrameDuration(request);
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_FRAME_DURATION,
                sizeof(sensorFrameDuration), &sensorFrameDuration);
    }

    /* Sensitivity (ISO) clamped to the sensor's advertised range. */
    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity =
            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity <
                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
            sensorSensitivity =
                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity >
                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
            sensorSensitivity =
                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_SENSITIVITY,
                sizeof(sensorSensitivity), &sensorSensitivity);
    }

    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        /* u8 metadata widened to int32_t (4 bytes sent) — TODO confirm
         * backend width, same pattern as CAM_INTF_META_DEMOSAIC above. */
        int32_t shadingMode =
            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
                sizeof(shadingMode), &shadingMode);
    }

    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
        uint8_t shadingStrength =
            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
                sizeof(shadingStrength), &shadingStrength);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
        uint8_t facedetectMode =
            lookupHalName(FACEDETECT_MODES_MAP,
                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_FACEDETECT_MODE,
                sizeof(facedetectMode), &facedetectMode);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_HISTOGRAM_MODE,
                sizeof(histogramMode), &histogramMode);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sizeof(sharpnessMapMode), &sharpnessMapMode);
    }

    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_TONEMAP_MODE,
                sizeof(tonemapMode), &tonemapMode);
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        /* Metadata stores interleaved (in, out) float pairs, hence count/2
         * points; the green channel's count is assumed for all three. */
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;

        /* ch0 = G*/
        int point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
            for (int j = 0; j < 2; j++) {
               tonemapCurveGreen.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (int j = 0; j < 2; j++) {
               tonemapCurveBlue.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (int j = 0; j < 2; j++) {
               tonemapCurveRed.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_TONEMAP_CURVES,
                sizeof(tonemapCurves), &tonemapCurves);
    }

    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent =
            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
                sizeof(captureIntent), &captureIntent);
    }

    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock =
            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
                sizeof(blackLevelLock), &blackLevelLock);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                sizeof(lensShadingMapMode), &lensShadingMapMode);
    }

    /* 3A regions: each ROI is validated against the crop region set above
     * (when one was set) and only sent when resetIfNeededROI allows it. */
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
                    sizeof(roi), &roi);
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
                    sizeof(roi), &roi);
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
                    sizeof(roi), &roi);
        }
    }
    return rc;
}
3536
3537/*===========================================================================
3538 * FUNCTION   : getJpegSettings
3539 *
3540 * DESCRIPTION: save the jpeg settings in the HAL
3541 *
3542 *
3543 * PARAMETERS :
3544 *   @settings  : frame settings information from framework
3545 *
3546 *
3547 * RETURN     : success: NO_ERROR
 *              failure: non-zero error code
3549 *==========================================================================*/
3550int QCamera3HardwareInterface::getJpegSettings
3551                                  (const camera_metadata_t *settings)
3552{
3553    if (mJpegSettings) {
3554        if (mJpegSettings->gps_timestamp) {
3555            free(mJpegSettings->gps_timestamp);
3556            mJpegSettings->gps_timestamp = NULL;
3557        }
3558        if (mJpegSettings->gps_coordinates) {
3559            for (int i = 0; i < 3; i++) {
3560                free(mJpegSettings->gps_coordinates[i]);
3561                mJpegSettings->gps_coordinates[i] = NULL;
3562            }
3563        }
3564        free(mJpegSettings);
3565        mJpegSettings = NULL;
3566    }
3567    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3568    CameraMetadata jpeg_settings;
3569    jpeg_settings = settings;
3570
3571    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3572        mJpegSettings->jpeg_orientation =
3573            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3574    } else {
3575        mJpegSettings->jpeg_orientation = 0;
3576    }
3577    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3578        mJpegSettings->jpeg_quality =
3579            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3580    } else {
3581        mJpegSettings->jpeg_quality = 85;
3582    }
3583    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3584        mJpegSettings->thumbnail_size.width =
3585            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3586        mJpegSettings->thumbnail_size.height =
3587            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3588    } else {
3589        mJpegSettings->thumbnail_size.width = 0;
3590        mJpegSettings->thumbnail_size.height = 0;
3591    }
3592    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3593        for (int i = 0; i < 3; i++) {
3594            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3595            *(mJpegSettings->gps_coordinates[i]) =
3596                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3597        }
3598    } else{
3599       for (int i = 0; i < 3; i++) {
3600            mJpegSettings->gps_coordinates[i] = NULL;
3601        }
3602    }
3603
3604    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3605        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3606        *(mJpegSettings->gps_timestamp) =
3607            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3608    } else {
3609        mJpegSettings->gps_timestamp = NULL;
3610    }
3611
3612    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3613        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3614        for (int i = 0; i < len; i++) {
3615            mJpegSettings->gps_processing_method[i] =
3616                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3617        }
3618        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3619            mJpegSettings->gps_processing_method[len] = '\0';
3620        }
3621    } else {
3622        mJpegSettings->gps_processing_method[0] = '\0';
3623    }
3624
3625    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3626        mJpegSettings->sensor_sensitivity =
3627            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3628    } else {
3629        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3630    }
3631
3632    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3633
3634    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3635        mJpegSettings->lens_focal_length =
3636            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3637    }
3638    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3639        mJpegSettings->exposure_compensation =
3640            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3641    }
3642    mJpegSettings->sharpness = 10; //default value
3643    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3644        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3645        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3646            mJpegSettings->sharpness = 0;
3647        }
3648    }
3649    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3650    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3651    mJpegSettings->is_jpeg_format = true;
3652    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3653    return 0;
3654}
3655
3656/*===========================================================================
3657 * FUNCTION   : captureResultCb
3658 *
3659 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3660 *
3661 * PARAMETERS :
3662 *   @frame  : frame information from mm-camera-interface
3663 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3664 *   @userdata: userdata
3665 *
3666 * RETURN     : NONE
3667 *==========================================================================*/
3668void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3669                camera3_stream_buffer_t *buffer,
3670                uint32_t frame_number, void *userdata)
3671{
3672    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3673    if (hw == NULL) {
3674        ALOGE("%s: Invalid hw %p", __func__, hw);
3675        return;
3676    }
3677
3678    hw->captureResultCb(metadata, buffer, frame_number);
3679    return;
3680}
3681
3682
3683/*===========================================================================
3684 * FUNCTION   : initialize
3685 *
3686 * DESCRIPTION: Pass framework callback pointers to HAL
3687 *
3688 * PARAMETERS :
3689 *
3690 *
3691 * RETURN     : Success : 0
3692 *              Failure: -ENODEV
3693 *==========================================================================*/
3694
3695int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3696                                  const camera3_callback_ops_t *callback_ops)
3697{
3698    ALOGV("%s: E", __func__);
3699    QCamera3HardwareInterface *hw =
3700        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3701    if (!hw) {
3702        ALOGE("%s: NULL camera device", __func__);
3703        return -ENODEV;
3704    }
3705
3706    int rc = hw->initialize(callback_ops);
3707    ALOGV("%s: X", __func__);
3708    return rc;
3709}
3710
3711/*===========================================================================
3712 * FUNCTION   : configure_streams
3713 *
 * DESCRIPTION: Entry point for the framework to (re)configure the set of
 *              active streams on this camera device
 *
 * PARAMETERS :
 *   @device      : camera3 device; priv field holds the HAL instance
 *   @stream_list : stream configuration requested by the framework
 *
3719 * RETURN     : Success: 0
3720 *              Failure: -EINVAL (if stream configuration is invalid)
3721 *                       -ENODEV (fatal error)
3722 *==========================================================================*/
3723
3724int QCamera3HardwareInterface::configure_streams(
3725        const struct camera3_device *device,
3726        camera3_stream_configuration_t *stream_list)
3727{
3728    ALOGV("%s: E", __func__);
3729    QCamera3HardwareInterface *hw =
3730        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3731    if (!hw) {
3732        ALOGE("%s: NULL camera device", __func__);
3733        return -ENODEV;
3734    }
3735    int rc = hw->configureStreams(stream_list);
3736    ALOGV("%s: X", __func__);
3737    return rc;
3738}
3739
3740/*===========================================================================
3741 * FUNCTION   : register_stream_buffers
3742 *
3743 * DESCRIPTION: Register stream buffers with the device
3744 *
3745 * PARAMETERS :
3746 *
3747 * RETURN     :
3748 *==========================================================================*/
3749int QCamera3HardwareInterface::register_stream_buffers(
3750        const struct camera3_device *device,
3751        const camera3_stream_buffer_set_t *buffer_set)
3752{
3753    ALOGV("%s: E", __func__);
3754    QCamera3HardwareInterface *hw =
3755        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3756    if (!hw) {
3757        ALOGE("%s: NULL camera device", __func__);
3758        return -ENODEV;
3759    }
3760    int rc = hw->registerStreamBuffers(buffer_set);
3761    ALOGV("%s: X", __func__);
3762    return rc;
3763}
3764
3765/*===========================================================================
3766 * FUNCTION   : construct_default_request_settings
3767 *
3768 * DESCRIPTION: Configure a settings buffer to meet the required use case
3769 *
3770 * PARAMETERS :
3771 *
3772 *
3773 * RETURN     : Success: Return valid metadata
3774 *              Failure: Return NULL
3775 *==========================================================================*/
3776const camera_metadata_t* QCamera3HardwareInterface::
3777    construct_default_request_settings(const struct camera3_device *device,
3778                                        int type)
3779{
3780
3781    ALOGV("%s: E", __func__);
3782    camera_metadata_t* fwk_metadata = NULL;
3783    QCamera3HardwareInterface *hw =
3784        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3785    if (!hw) {
3786        ALOGE("%s: NULL camera device", __func__);
3787        return NULL;
3788    }
3789
3790    fwk_metadata = hw->translateCapabilityToMetadata(type);
3791
3792    ALOGV("%s: X", __func__);
3793    return fwk_metadata;
3794}
3795
3796/*===========================================================================
3797 * FUNCTION   : process_capture_request
3798 *
 * DESCRIPTION: Queue one capture request (settings + output buffers) to the
 *              HAL instance stored in the device's priv field
 *
 * PARAMETERS :
 *   @device  : camera3 device
 *   @request : capture request to process
 *
 * RETURN     : Success: 0
 *              Failure: -EINVAL (NULL device)
3805 *==========================================================================*/
3806int QCamera3HardwareInterface::process_capture_request(
3807                    const struct camera3_device *device,
3808                    camera3_capture_request_t *request)
3809{
3810    ALOGV("%s: E", __func__);
3811    QCamera3HardwareInterface *hw =
3812        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3813    if (!hw) {
3814        ALOGE("%s: NULL camera device", __func__);
3815        return -EINVAL;
3816    }
3817
3818    int rc = hw->processCaptureRequest(request);
3819    ALOGV("%s: X", __func__);
3820    return rc;
3821}
3822
3823/*===========================================================================
3824 * FUNCTION   : get_metadata_vendor_tag_ops
3825 *
3826 * DESCRIPTION:
3827 *
3828 * PARAMETERS :
3829 *
3830 *
3831 * RETURN     :
3832 *==========================================================================*/
3833
3834void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3835                const struct camera3_device *device,
3836                vendor_tag_query_ops_t* ops)
3837{
3838    ALOGV("%s: E", __func__);
3839    QCamera3HardwareInterface *hw =
3840        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3841    if (!hw) {
3842        ALOGE("%s: NULL camera device", __func__);
3843        return;
3844    }
3845
3846    hw->getMetadataVendorTagOps(ops);
3847    ALOGV("%s: X", __func__);
3848    return;
3849}
3850
3851/*===========================================================================
3852 * FUNCTION   : dump
3853 *
3854 * DESCRIPTION:
3855 *
3856 * PARAMETERS :
3857 *
3858 *
3859 * RETURN     :
3860 *==========================================================================*/
3861
3862void QCamera3HardwareInterface::dump(
3863                const struct camera3_device *device, int fd)
3864{
3865    ALOGV("%s: E", __func__);
3866    QCamera3HardwareInterface *hw =
3867        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3868    if (!hw) {
3869        ALOGE("%s: NULL camera device", __func__);
3870        return;
3871    }
3872
3873    hw->dump(fd);
3874    ALOGV("%s: X", __func__);
3875    return;
3876}
3877
3878/*===========================================================================
3879 * FUNCTION   : flush
3880 *
3881 * DESCRIPTION:
3882 *
3883 * PARAMETERS :
3884 *
3885 *
3886 * RETURN     :
3887 *==========================================================================*/
3888
3889int QCamera3HardwareInterface::flush(
3890                const struct camera3_device *device)
3891{
3892    int rc;
3893    ALOGV("%s: E", __func__);
3894    QCamera3HardwareInterface *hw =
3895        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3896    if (!hw) {
3897        ALOGE("%s: NULL camera device", __func__);
3898        return -EINVAL;
3899    }
3900
3901    rc = hw->flush();
3902    ALOGV("%s: X", __func__);
3903    return rc;
3904}
3905
3906/*===========================================================================
3907 * FUNCTION   : close_camera_device
3908 *
 * DESCRIPTION: Tear down the HAL instance behind the given hw_device_t and
 *              mark the camera session as inactive
 *
 * PARAMETERS :
 *   @device : hw_device_t wrapping a camera3_device_t
 *
 * RETURN     : NO_ERROR on success; BAD_VALUE if no HAL instance is attached
3915 *==========================================================================*/
3916int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3917{
3918    ALOGV("%s: E", __func__);
3919    int ret = NO_ERROR;
3920    QCamera3HardwareInterface *hw =
3921        reinterpret_cast<QCamera3HardwareInterface *>(
3922            reinterpret_cast<camera3_device_t *>(device)->priv);
3923    if (!hw) {
3924        ALOGE("NULL camera device");
3925        return BAD_VALUE;
3926    }
3927    delete hw;
3928
3929    pthread_mutex_lock(&mCameraSessionLock);
3930    mCameraSessionActive = 0;
3931    pthread_mutex_unlock(&mCameraSessionLock);
3932    ALOGV("%s: X", __func__);
3933    return ret;
3934}
3935
3936/*===========================================================================
3937 * FUNCTION   : getWaveletDenoiseProcessPlate
3938 *
3939 * DESCRIPTION: query wavelet denoise process plate
3940 *
3941 * PARAMETERS : None
3942 *
 * RETURN     : WNR process plate value
3944 *==========================================================================*/
3945cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3946{
3947    char prop[PROPERTY_VALUE_MAX];
3948    memset(prop, 0, sizeof(prop));
3949    property_get("persist.denoise.process.plates", prop, "0");
3950    int processPlate = atoi(prop);
3951    switch(processPlate) {
3952    case 0:
3953        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3954    case 1:
3955        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3956    case 2:
3957        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3958    case 3:
3959        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3960    default:
3961        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3962    }
3963}
3964
3965/*===========================================================================
3966 * FUNCTION   : needRotationReprocess
3967 *
3968 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3969 *
3970 * PARAMETERS : none
3971 *
3972 * RETURN     : true: needed
3973 *              false: no need
3974 *==========================================================================*/
3975bool QCamera3HardwareInterface::needRotationReprocess()
3976{
3977
3978    if (!mJpegSettings->is_jpeg_format) {
3979        // RAW image, no need to reprocess
3980        return false;
3981    }
3982
3983    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3984        mJpegSettings->jpeg_orientation > 0) {
3985        // current rotation is not zero, and pp has the capability to process rotation
3986        ALOGD("%s: need do reprocess for rotation", __func__);
3987        return true;
3988    }
3989
3990    return false;
3991}
3992
3993/*===========================================================================
3994 * FUNCTION   : needReprocess
3995 *
3996 * DESCRIPTION: if reprocess in needed
3997 *
3998 * PARAMETERS : none
3999 *
4000 * RETURN     : true: needed
4001 *              false: no need
4002 *==========================================================================*/
4003bool QCamera3HardwareInterface::needReprocess()
4004{
4005    if (!mJpegSettings->is_jpeg_format) {
4006        // RAW image, no need to reprocess
4007        return false;
4008    }
4009
4010    if ((mJpegSettings->min_required_pp_mask > 0) ||
4011         isWNREnabled()) {
4012        // TODO: add for ZSL HDR later
4013        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4014        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4015        return true;
4016    }
4017    return needRotationReprocess();
4018}
4019
4020/*===========================================================================
4021 * FUNCTION   : addOnlineReprocChannel
4022 *
4023 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4024 *              coming from input channel
4025 *
4026 * PARAMETERS :
4027 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4028 *
4029 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4030 *==========================================================================*/
4031QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4032              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4033{
4034    int32_t rc = NO_ERROR;
4035    QCamera3ReprocessChannel *pChannel = NULL;
4036    if (pInputChannel == NULL) {
4037        ALOGE("%s: input channel obj is NULL", __func__);
4038        return NULL;
4039    }
4040
4041    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4042            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4043    if (NULL == pChannel) {
4044        ALOGE("%s: no mem for reprocess channel", __func__);
4045        return NULL;
4046    }
4047
4048    // Capture channel, only need snapshot and postview streams start together
4049    mm_camera_channel_attr_t attr;
4050    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4051    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4052    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4053    rc = pChannel->initialize();
4054    if (rc != NO_ERROR) {
4055        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4056        delete pChannel;
4057        return NULL;
4058    }
4059
4060    // pp feature config
4061    cam_pp_feature_config_t pp_config;
4062    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4063    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4064        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4065        pp_config.sharpness = mJpegSettings->sharpness;
4066    }
4067
4068    if (isWNREnabled()) {
4069        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4070        pp_config.denoise2d.denoise_enable = 1;
4071        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4072    }
4073    if (needRotationReprocess()) {
4074        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4075        int rotation = mJpegSettings->jpeg_orientation;
4076        if (rotation == 0) {
4077            pp_config.rotation = ROTATE_0;
4078        } else if (rotation == 90) {
4079            pp_config.rotation = ROTATE_90;
4080        } else if (rotation == 180) {
4081            pp_config.rotation = ROTATE_180;
4082        } else if (rotation == 270) {
4083            pp_config.rotation = ROTATE_270;
4084        }
4085    }
4086
4087   rc = pChannel->addReprocStreamsFromSource(pp_config,
4088                                             pInputChannel,
4089                                             mMetadataChannel);
4090
4091    if (rc != NO_ERROR) {
4092        delete pChannel;
4093        return NULL;
4094    }
4095    return pChannel;
4096}
4097
/*===========================================================================
 * FUNCTION   : getMaxUnmatchedFramesInQueue
 *
 * DESCRIPTION: query the number of unmatched frames a channel may queue
 *
 * PARAMETERS : none
 *
 * RETURN     : min_num_pp_bufs from this camera's capability table
 *==========================================================================*/
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4102
/*===========================================================================
 * FUNCTION   : isWNREnabled
 *
 * DESCRIPTION: check whether wavelet noise reduction is supported for this
 *              camera, per the static capability table
 *
 * PARAMETERS : none
 *
 * RETURN     : true if WNR is supported
 *==========================================================================*/
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4106
4107}; //end namespace qcamera
4108