QCamera3HWI.cpp revision 4e9f714df832a61716a2aa6e0a213333abef1a8c
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
#define MAX(a, b) ((a) > (b) ? (a) : (b))

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability table. Filled in elsewhere by the camera module
// before a hardware interface object is constructed — the constructor
// dereferences gCamCapability[cameraId] without a NULL check.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: only one camera session may be active at a
// time (openCamera() rejects a second concurrent instance).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;

// Translation tables mapping android.control.* framework enum values to the
// corresponding mm-camera backend (CAM_*) enum values.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// NOTE: STEADYPHOTO intentionally maps to the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// NOTE: AF_MODE_OFF maps to the backend's FIXED focus mode (no OFF enum).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Maps the framework AE mode to the flash behavior it implies; both plain ON
// and OFF AE modes keep the flash off, and REDEYE falls back to AUTO flash.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Flat list of supported thumbnail (width, height) pairs; the trailing (0, 0)
// entry presumably advertises that thumbnails can be disabled — matches the
// android.jpeg.availableThumbnailSizes convention.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};

// camera3 HAL device ops vtable, using GCC's old-style designated
// initializers. Each entry forwards to the static trampoline of the same
// name, which recovers the instance from camera3_device_t::priv.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Wire up the camera3_device_t shell handed back to the framework;
    // priv lets the static ops trampolines recover this instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced without a NULL
    // check — assumes capabilities were queried before construction; confirm.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL is optional; m_pPowerModule stays NULL if lookup fails and
    // all users check it before dereferencing.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if (channel)
240            delete channel;
241        free (*it);
242    }
243
244    mPictureChannel = NULL;
245
246    if (mJpegSettings != NULL) {
247        free(mJpegSettings);
248        mJpegSettings = NULL;
249    }
250
251    /* Clean up all channels */
252    if (mCameraInitialized) {
253        mMetadataChannel->stop();
254        delete mMetadataChannel;
255        mMetadataChannel = NULL;
256        deinitParameters();
257    }
258
259    if (mCameraOpened)
260        closeCamera();
261
262    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
263        if (mDefaultMetadata[i])
264            free_camera_metadata(mDefaultMetadata[i]);
265
266    pthread_cond_destroy(&mRequestCond);
267
268    pthread_mutex_destroy(&mMutex);
269    ALOGV("%s: X", __func__);
270}
271
272/*===========================================================================
273 * FUNCTION   : openCamera
274 *
275 * DESCRIPTION: open camera
276 *
277 * PARAMETERS :
278 *   @hw_device  : double ptr for camera device struct
279 *
280 * RETURN     : int32_t type of status
281 *              NO_ERROR  -- success
282 *              none-zero failure code
283 *==========================================================================*/
284int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
285{
286    int rc = 0;
287    pthread_mutex_lock(&mCameraSessionLock);
288    if (mCameraSessionActive) {
289        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
290        pthread_mutex_unlock(&mCameraSessionLock);
291        return INVALID_OPERATION;
292    }
293
294    if (mCameraOpened) {
295        *hw_device = NULL;
296        return PERMISSION_DENIED;
297    }
298
299    rc = openCamera();
300    if (rc == 0) {
301        *hw_device = &mCameraDevice.common;
302        mCameraSessionActive = 1;
303    } else
304        *hw_device = NULL;
305
306#ifdef HAS_MULTIMEDIA_HINTS
307    if (rc == 0) {
308        if (m_pPowerModule) {
309            if (m_pPowerModule->powerHint) {
310                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
311                        (void *)"state=1");
312            }
313        }
314    }
315#endif
316    pthread_mutex_unlock(&mCameraSessionLock);
317    return rc;
318}
319
320/*===========================================================================
321 * FUNCTION   : openCamera
322 *
323 * DESCRIPTION: open camera
324 *
325 * PARAMETERS : none
326 *
327 * RETURN     : int32_t type of status
328 *              NO_ERROR  -- success
329 *              none-zero failure code
330 *==========================================================================*/
331int QCamera3HardwareInterface::openCamera()
332{
333    if (mCameraHandle) {
334        ALOGE("Failure: Camera already opened");
335        return ALREADY_EXISTS;
336    }
337    mCameraHandle = camera_open(mCameraId);
338    if (!mCameraHandle) {
339        ALOGE("camera_open failed.");
340        return UNKNOWN_ERROR;
341    }
342
343    mCameraOpened = true;
344
345    return NO_ERROR;
346}
347
348/*===========================================================================
349 * FUNCTION   : closeCamera
350 *
351 * DESCRIPTION: close camera
352 *
353 * PARAMETERS : none
354 *
355 * RETURN     : int32_t type of status
356 *              NO_ERROR  -- success
357 *              none-zero failure code
358 *==========================================================================*/
359int QCamera3HardwareInterface::closeCamera()
360{
361    int rc = NO_ERROR;
362
363    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
364    mCameraHandle = NULL;
365    mCameraOpened = false;
366
367#ifdef HAS_MULTIMEDIA_HINTS
368    if (rc == NO_ERROR) {
369        if (m_pPowerModule) {
370            if (m_pPowerModule->powerHint) {
371                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
372                        (void *)"state=0");
373            }
374        }
375    }
376#endif
377
378    return rc;
379}
380
381/*===========================================================================
382 * FUNCTION   : initialize
383 *
384 * DESCRIPTION: Initialize frameworks callback functions
385 *
386 * PARAMETERS :
387 *   @callback_ops : callback function to frameworks
388 *
389 * RETURN     :
390 *
391 *==========================================================================*/
392int QCamera3HardwareInterface::initialize(
393        const struct camera3_callback_ops *callback_ops)
394{
395    int rc;
396
397    pthread_mutex_lock(&mMutex);
398
399    rc = initParameters();
400    if (rc < 0) {
401        ALOGE("%s: initParamters failed %d", __func__, rc);
402       goto err1;
403    }
404    mCallbackOps = callback_ops;
405
406    pthread_mutex_unlock(&mMutex);
407    mCameraInitialized = true;
408    return 0;
409
410err1:
411    pthread_mutex_unlock(&mMutex);
412    return rc;
413}
414
415/*===========================================================================
416 * FUNCTION   : configureStreams
417 *
418 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
419 *              and output streams.
420 *
421 * PARAMETERS :
422 *   @stream_list : streams to be configured
423 *
424 * RETURN     :
425 *
426 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    // ZSL mode is re-detected from the new stream set below.
    mIsZslMode = false;

    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        return BAD_VALUE;
    }
    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        return BAD_VALUE;
    }

    /* first invalidate all the steams in the mStreamList
     * if they appear again, they will be validated */
    // NOTE(review): channel is dereferenced without a NULL check here, unlike
    // the destructor's loops — assumes priv is always set for tracked streams.
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        mMetadataChannel->stop();
    }

    pthread_mutex_lock(&mMutex);

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    cam_stream_size_info_t stream_config_info;

    // Pass 1: reconcile the incoming list with the tracked stream set, and
    // find the (single allowed) input stream and the BLOB (JPEG) stream.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            // NOTE(review): malloc return is not checked before dereference.
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    // Streams that did not reappear in the new configuration are torn down:
    // their channel, registered buffer-handle array, and bookkeeping entry.
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }
    // The metadata channel is always rebuilt for a new configuration.
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    // NOTE(review): plain `new` throws std::bad_alloc rather than returning
    // NULL, so this check is dead unless a nothrow allocator is in play.
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        // NOTE(review): mMetadataChannel is deleted but not reset to NULL on
        // this path, leaving a dangling pointer for the destructor — verify.
        delete mMetadataChannel;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* Allocate channel objects for the requested streams */
    // Pass 2: derive the backend stream type/size table and construct a
    // channel for every stream that does not have one yet.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        stream_config_info.stream_sizes[i].width = newStream->width;
        stream_config_info.stream_sizes[i].height = newStream->height;
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
            //for zsl stream the size is jpeg size
            stream_config_info.stream_sizes[i].width = jpegStream->width;
            stream_config_info.stream_sizes[i].height = jpegStream->height;
            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
        } else {
           //for non zsl streams find out the format
           switch (newStream->format) {
           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
              {
                 // Encoder-consumed gralloc buffers become the video stream;
                 // everything else implementation-defined is preview.
                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
                 } else {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
                 }
              }
              break;
           case HAL_PIXEL_FORMAT_YCbCr_420_888:
              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
              break;
           case HAL_PIXEL_FORMAT_BLOB:
              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
              break;
           default:
              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
              break;
           }
        }
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // Advertise the gralloc usage flags the HAL needs on this stream.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage =
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else
                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    // A bidirectional stream alongside a JPEG stream means
                    // ZSL: internal buffers are sized to the JPEG dimensions.
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    // NOTE(review): dead NULL check — `new` throws on failure.
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework wont*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        // Reset the per-stream pending-buffer counter for this configuration.
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();

    /*flush the metadata list*/
    // NOTE(review): `m = erase(m)` followed by the loop's `m++` skips every
    // other entry (same pattern as the destructor) — looks like a bug; verify.
    if (!mStoredMetadataList.empty()) {
        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
              m != mStoredMetadataList.end(); m++) {
            mMetadataChannel->bufDone(m->meta_buf);
            free(m->meta_buf);
            m = mStoredMetadataList.erase(m);
        }
    }
    int32_t hal_version = CAM_HAL_V3;
    stream_config_info.num_streams = streamList->num_streams;

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));

    // Push the HAL version and the per-stream size/type table to the backend.
    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);

    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
                sizeof(stream_config_info), &stream_config_info);

    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);

    mFirstRequest = true;

    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}
724
725/*===========================================================================
726 * FUNCTION   : validateCaptureRequest
727 *
728 * DESCRIPTION: validate a capture request from camera service
729 *
730 * PARAMETERS :
731 *   @request : request from framework to process
732 *
733 * RETURN     :
734 *
735 *==========================================================================*/
736int QCamera3HardwareInterface::validateCaptureRequest(
737                    camera3_capture_request_t *request)
738{
739    ssize_t idx = 0;
740    const camera3_stream_buffer_t *b;
741    CameraMetadata meta;
742
743    /* Sanity check the request */
744    if (request == NULL) {
745        ALOGE("%s: NULL capture request", __func__);
746        return BAD_VALUE;
747    }
748
749    uint32_t frameNumber = request->frame_number;
750    if (request->input_buffer != NULL &&
751            request->input_buffer->stream != mInputStream) {
752        ALOGE("%s: Request %d: Input buffer not from input stream!",
753                __FUNCTION__, frameNumber);
754        return BAD_VALUE;
755    }
756    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
757        ALOGE("%s: Request %d: No output buffers provided!",
758                __FUNCTION__, frameNumber);
759        return BAD_VALUE;
760    }
761    if (request->input_buffer != NULL) {
762        b = request->input_buffer;
763        QCamera3Channel *channel =
764            static_cast<QCamera3Channel*>(b->stream->priv);
765        if (channel == NULL) {
766            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
767                    __func__, frameNumber, idx);
768            return BAD_VALUE;
769        }
770        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
771            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
772                    __func__, frameNumber, idx);
773            return BAD_VALUE;
774        }
775        if (b->release_fence != -1) {
776            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
777                    __func__, frameNumber, idx);
778            return BAD_VALUE;
779        }
780        if (b->buffer == NULL) {
781            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
782                    __func__, frameNumber, idx);
783            return BAD_VALUE;
784        }
785    }
786
787    // Validate all buffers
788    b = request->output_buffers;
789    do {
790        QCamera3Channel *channel =
791                static_cast<QCamera3Channel*>(b->stream->priv);
792        if (channel == NULL) {
793            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
794                    __func__, frameNumber, idx);
795            return BAD_VALUE;
796        }
797        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
798            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
799                    __func__, frameNumber, idx);
800            return BAD_VALUE;
801        }
802        if (b->release_fence != -1) {
803            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
804                    __func__, frameNumber, idx);
805            return BAD_VALUE;
806        }
807        if (b->buffer == NULL) {
808            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
809                    __func__, frameNumber, idx);
810            return BAD_VALUE;
811        }
812        idx++;
813        b = request->output_buffers + idx;
814    } while (idx < (ssize_t)request->num_output_buffers);
815
816    return NO_ERROR;
817}
818
819/*===========================================================================
820 * FUNCTION   : deriveMinFrameDuration
821 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
823 *              on currently configured streams.
824 *
825 * PARAMETERS : NONE
826 *
827 * RETURN     : NONE
828 *
829 *==========================================================================*/
830void QCamera3HardwareInterface::deriveMinFrameDuration()
831{
832    int32_t maxJpegDimension, maxProcessedDimension;
833
834    maxJpegDimension = 0;
835    maxProcessedDimension = 0;
836
837    // Figure out maximum jpeg, processed, and raw dimensions
838    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
839        it != mStreamInfo.end(); it++) {
840
841        // Input stream doesn't have valid stream_type
842        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
843            continue;
844
845        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
846        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
847            if (dimension > maxJpegDimension)
848                maxJpegDimension = dimension;
849        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
850            if (dimension > maxProcessedDimension)
851                maxProcessedDimension = dimension;
852        }
853    }
854
855    //Assume all jpeg dimensions are in processed dimensions.
856    if (maxJpegDimension > maxProcessedDimension)
857        maxProcessedDimension = maxJpegDimension;
858
859    //Find minimum durations for processed, jpeg, and raw
860    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
861    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
862        if (maxProcessedDimension ==
863            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
864            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
865            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
866            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
867            break;
868        }
869    }
870}
871
872/*===========================================================================
873 * FUNCTION   : getMinFrameDuration
874 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the framework
 *
 * RETURN     : min frame duration for a particular request
881 *
882 *==========================================================================*/
883int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
884{
885    bool hasJpegStream = false;
886    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
887        const camera3_stream_t *stream = request->output_buffers[i].stream;
888        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
889            hasJpegStream = true;
890    }
891
892    if (!hasJpegStream)
893        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
894    else
895        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
896}
897
898/*===========================================================================
899 * FUNCTION   : registerStreamBuffers
900 *
901 * DESCRIPTION: Register buffers for a given stream with the HAL device.
902 *
 * PARAMETERS :
 *   @buffer_set : stream buffers to be registered with the HAL device
905 *
906 * RETURN     :
907 *
908 *==========================================================================*/
909int QCamera3HardwareInterface::registerStreamBuffers(
910        const camera3_stream_buffer_set_t *buffer_set)
911{
912    int rc = 0;
913
914    pthread_mutex_lock(&mMutex);
915
916    if (buffer_set == NULL) {
917        ALOGE("%s: Invalid buffer_set parameter.", __func__);
918        pthread_mutex_unlock(&mMutex);
919        return -EINVAL;
920    }
921    if (buffer_set->stream == NULL) {
922        ALOGE("%s: Invalid stream parameter.", __func__);
923        pthread_mutex_unlock(&mMutex);
924        return -EINVAL;
925    }
926    if (buffer_set->num_buffers < 1) {
927        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
928        pthread_mutex_unlock(&mMutex);
929        return -EINVAL;
930    }
931    if (buffer_set->buffers == NULL) {
932        ALOGE("%s: Invalid buffers parameter.", __func__);
933        pthread_mutex_unlock(&mMutex);
934        return -EINVAL;
935    }
936
937    camera3_stream_t *stream = buffer_set->stream;
938    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
939
940    //set the buffer_set in the mStreamInfo array
941    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
942            it != mStreamInfo.end(); it++) {
943        if ((*it)->stream == stream) {
944            uint32_t numBuffers = buffer_set->num_buffers;
945            (*it)->buffer_set.stream = buffer_set->stream;
946            (*it)->buffer_set.num_buffers = numBuffers;
947            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
948            if ((*it)->buffer_set.buffers == NULL) {
949                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
950                pthread_mutex_unlock(&mMutex);
951                return -ENOMEM;
952            }
953            for (size_t j = 0; j < numBuffers; j++){
954                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
955            }
956            (*it)->registered = 1;
957        }
958    }
959    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
960    if (rc < 0) {
961        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
962        pthread_mutex_unlock(&mMutex);
963        return -ENODEV;
964    }
965
966    pthread_mutex_unlock(&mMutex);
967    return NO_ERROR;
968}
969
970/*===========================================================================
971 * FUNCTION   : processCaptureRequest
972 *
973 * DESCRIPTION: process a capture request from camera service
974 *
975 * PARAMETERS :
976 *   @request : request from framework to process
977 *
978 * RETURN     :
979 *
980 *==========================================================================*/
981int QCamera3HardwareInterface::processCaptureRequest(
982                    camera3_capture_request_t *request)
983{
984    int rc = NO_ERROR;
985    int32_t request_id;
986    CameraMetadata meta;
987    MetadataBufferInfo reproc_meta;
988    int queueMetadata = 0;
989
990    pthread_mutex_lock(&mMutex);
991
992    rc = validateCaptureRequest(request);
993    if (rc != NO_ERROR) {
994        ALOGE("%s: incoming request is not valid", __func__);
995        pthread_mutex_unlock(&mMutex);
996        return rc;
997    }
998
999    meta = request->settings;
1000
1001    // For first capture request, send capture intent, and
1002    // stream on all streams
1003    if (mFirstRequest) {
1004
1005        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1006            int32_t hal_version = CAM_HAL_V3;
1007            uint8_t captureIntent =
1008                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1009
1010            memset(mParameters, 0, sizeof(parm_buffer_t));
1011            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1012            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1013                sizeof(hal_version), &hal_version);
1014            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1015                sizeof(captureIntent), &captureIntent);
1016            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1017                mParameters);
1018        }
1019
1020        mMetadataChannel->start();
1021        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1022            it != mStreamInfo.end(); it++) {
1023            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1024            channel->start();
1025        }
1026    }
1027
1028    uint32_t frameNumber = request->frame_number;
1029    uint32_t streamTypeMask = 0;
1030
1031    if (meta.exists(ANDROID_REQUEST_ID)) {
1032        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1033        mCurrentRequestId = request_id;
1034        ALOGV("%s: Received request with id: %d",__func__, request_id);
1035    } else if (mFirstRequest || mCurrentRequestId == -1){
1036        ALOGE("%s: Unable to find request id field, \
1037                & no previous id available", __func__);
1038        return NAME_NOT_FOUND;
1039    } else {
1040        ALOGV("%s: Re-using old request id", __func__);
1041        request_id = mCurrentRequestId;
1042    }
1043
1044    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1045                                    __func__, __LINE__,
1046                                    request->num_output_buffers,
1047                                    request->input_buffer,
1048                                    frameNumber);
1049    // Acquire all request buffers first
1050    int blob_request = 0;
1051    for (size_t i = 0; i < request->num_output_buffers; i++) {
1052        const camera3_stream_buffer_t& output = request->output_buffers[i];
1053        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1054        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1055
1056        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1057        //Call function to store local copy of jpeg data for encode params.
1058            blob_request = 1;
1059            rc = getJpegSettings(request->settings);
1060            if (rc < 0) {
1061                ALOGE("%s: failed to get jpeg parameters", __func__);
1062                pthread_mutex_unlock(&mMutex);
1063                return rc;
1064            }
1065        }
1066
1067        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1068        if (rc != OK) {
1069            ALOGE("%s: fence wait failed %d", __func__, rc);
1070            pthread_mutex_unlock(&mMutex);
1071            return rc;
1072        }
1073        streamTypeMask |= channel->getStreamTypeMask();
1074    }
1075
1076    rc = setFrameParameters(request, streamTypeMask);
1077    if (rc < 0) {
1078        ALOGE("%s: fail to set frame parameters", __func__);
1079        pthread_mutex_unlock(&mMutex);
1080        return rc;
1081    }
1082
1083    /* Update pending request list and pending buffers map */
1084    PendingRequestInfo pendingRequest;
1085    pendingRequest.frame_number = frameNumber;
1086    pendingRequest.num_buffers = request->num_output_buffers;
1087    pendingRequest.request_id = request_id;
1088    pendingRequest.blob_request = blob_request;
1089    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1090
1091    for (size_t i = 0; i < request->num_output_buffers; i++) {
1092        RequestedBufferInfo requestedBuf;
1093        requestedBuf.stream = request->output_buffers[i].stream;
1094        requestedBuf.buffer = NULL;
1095        pendingRequest.buffers.push_back(requestedBuf);
1096
1097        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1098    }
1099    mPendingRequestsList.push_back(pendingRequest);
1100
1101    // Notify metadata channel we receive a request
1102    mMetadataChannel->request(NULL, frameNumber);
1103
1104    // Call request on other streams
1105    for (size_t i = 0; i < request->num_output_buffers; i++) {
1106        const camera3_stream_buffer_t& output = request->output_buffers[i];
1107        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1108        mm_camera_buf_def_t *pInputBuffer = NULL;
1109
1110        if (channel == NULL) {
1111            ALOGE("%s: invalid channel pointer for stream", __func__);
1112            continue;
1113        }
1114
1115        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1116            QCamera3RegularChannel* inputChannel = NULL;
1117            if(request->input_buffer != NULL){
1118                //Try to get the internal format
1119                inputChannel = (QCamera3RegularChannel*)
1120                    request->input_buffer->stream->priv;
1121                if(inputChannel == NULL ){
1122                    ALOGE("%s: failed to get input channel handle", __func__);
1123                } else {
1124                    pInputBuffer =
1125                        inputChannel->getInternalFormatBuffer(
1126                                request->input_buffer->buffer);
1127                    ALOGD("%s: Input buffer dump",__func__);
1128                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1129                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1130                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1131                    ALOGD("Handle:%p", request->input_buffer->buffer);
1132                    //TODO: need to get corresponding metadata and send it to pproc
1133                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1134                         m != mStoredMetadataList.end(); m++) {
1135                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1136                            reproc_meta.meta_buf = m->meta_buf;
1137                            queueMetadata = 1;
1138                            break;
1139                        }
1140                    }
1141                }
1142            }
1143            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1144                            pInputBuffer,(QCamera3Channel*)inputChannel);
1145            if (queueMetadata) {
1146                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1147            }
1148        } else {
1149            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1150                __LINE__, output.buffer, frameNumber);
1151            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1152                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1153                     m != mStoredMetadataList.end(); m++) {
1154                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1155                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1156                            mMetadataChannel->bufDone(m->meta_buf);
1157                            free(m->meta_buf);
1158                            m = mStoredMetadataList.erase(m);
1159                            break;
1160                        }
1161                   }
1162                }
1163            }
1164            rc = channel->request(output.buffer, frameNumber);
1165        }
1166        if (rc < 0)
1167            ALOGE("%s: request failed", __func__);
1168    }
1169
1170    mFirstRequest = false;
1171    // Added a timed condition wait
1172    struct timespec ts;
1173    uint8_t isValidTimeout = 1;
1174    rc = clock_gettime(CLOCK_REALTIME, &ts);
1175    if (rc < 0) {
1176        isValidTimeout = 0;
1177        ALOGE("%s: Error reading the real time clock!!", __func__);
1178    }
1179    else {
1180        // Make timeout as 5 sec for request to be honored
1181        ts.tv_sec += 5;
1182    }
1183    //Block on conditional variable
1184    mPendingRequest = 1;
1185    while (mPendingRequest == 1) {
1186        if (!isValidTimeout) {
1187            ALOGV("%s: Blocking on conditional wait", __func__);
1188            pthread_cond_wait(&mRequestCond, &mMutex);
1189        }
1190        else {
1191            ALOGV("%s: Blocking on timed conditional wait", __func__);
1192            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1193            if (rc == ETIMEDOUT) {
1194                rc = -ENODEV;
1195                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1196                break;
1197            }
1198        }
1199        ALOGV("%s: Unblocked", __func__);
1200    }
1201
1202    pthread_mutex_unlock(&mMutex);
1203    return rc;
1204}
1205
1206/*===========================================================================
1207 * FUNCTION   : getMetadataVendorTagOps
1208 *
1209 * DESCRIPTION:
1210 *
1211 * PARAMETERS :
1212 *
1213 *
1214 * RETURN     :
1215 *==========================================================================*/
1216void QCamera3HardwareInterface::getMetadataVendorTagOps(
1217                    vendor_tag_query_ops_t* /*ops*/)
1218{
1219    /* Enable locks when we eventually add Vendor Tags */
1220    /*
1221    pthread_mutex_lock(&mMutex);
1222
1223    pthread_mutex_unlock(&mMutex);
1224    */
1225    return;
1226}
1227
1228/*===========================================================================
1229 * FUNCTION   : dump
1230 *
1231 * DESCRIPTION:
1232 *
1233 * PARAMETERS :
1234 *
1235 *
1236 * RETURN     :
1237 *==========================================================================*/
1238void QCamera3HardwareInterface::dump(int /*fd*/)
1239{
1240    /*Enable lock when we implement this function*/
1241    /*
1242    pthread_mutex_lock(&mMutex);
1243
1244    pthread_mutex_unlock(&mMutex);
1245    */
1246    return;
1247}
1248
1249/*===========================================================================
1250 * FUNCTION   : flush
1251 *
1252 * DESCRIPTION:
1253 *
1254 * PARAMETERS :
1255 *
1256 *
1257 * RETURN     :
1258 *==========================================================================*/
1259int QCamera3HardwareInterface::flush()
1260{
1261    /*Enable lock when we implement this function*/
1262    /*
1263    pthread_mutex_lock(&mMutex);
1264
1265    pthread_mutex_unlock(&mMutex);
1266    */
1267    return 0;
1268}
1269
1270/*===========================================================================
1271 * FUNCTION   : captureResultCb
1272 *
1273 * DESCRIPTION: Callback handler for all capture result
1274 *              (streams, as well as metadata)
1275 *
1276 * PARAMETERS :
1277 *   @metadata : metadata information
1278 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1279 *               NULL if metadata.
1280 *
1281 * RETURN     : NONE
1282 *==========================================================================*/
1283void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1284                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1285{
1286    pthread_mutex_lock(&mMutex);
1287
1288    if (metadata_buf) {
1289        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1290        int32_t frame_number_valid = *(int32_t *)
1291            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1292        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1293            CAM_INTF_META_PENDING_REQUESTS, metadata);
1294        uint32_t frame_number = *(uint32_t *)
1295            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1296        const struct timeval *tv = (const struct timeval *)
1297            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1298        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1299            tv->tv_usec * NSEC_PER_USEC;
1300
1301        if (!frame_number_valid) {
1302            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1303            mMetadataChannel->bufDone(metadata_buf);
1304            goto done_metadata;
1305        }
1306        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1307                frame_number, capture_time);
1308
1309        // Go through the pending requests info and send shutter/results to frameworks
1310        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1311                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1312            camera3_capture_result_t result;
1313            camera3_notify_msg_t notify_msg;
1314            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1315
1316            // Flush out all entries with less or equal frame numbers.
1317
1318            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1319            //Right now it's the same as metadata timestamp
1320
1321            //TODO: When there is metadata drop, how do we derive the timestamp of
1322            //dropped frames? For now, we fake the dropped timestamp by substracting
1323            //from the reported timestamp
1324            nsecs_t current_capture_time = capture_time -
1325                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1326
1327            // Send shutter notify to frameworks
1328            notify_msg.type = CAMERA3_MSG_SHUTTER;
1329            notify_msg.message.shutter.frame_number = i->frame_number;
1330            notify_msg.message.shutter.timestamp = current_capture_time;
1331            mCallbackOps->notify(mCallbackOps, &notify_msg);
1332            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1333                    i->frame_number, capture_time);
1334
1335            // Send empty metadata with already filled buffers for dropped metadata
1336            // and send valid metadata with already filled buffers for current metadata
1337            if (i->frame_number < frame_number) {
1338                CameraMetadata dummyMetadata;
1339                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1340                        &current_capture_time, 1);
1341                dummyMetadata.update(ANDROID_REQUEST_ID,
1342                        &(i->request_id), 1);
1343                result.result = dummyMetadata.release();
1344            } else {
1345                result.result = translateCbMetadataToResultMetadata(metadata,
1346                        current_capture_time, i->request_id);
1347                if (mIsZslMode) {
1348                   int found_metadata = 0;
1349                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1350                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1351                        j != i->buffers.end(); j++) {
1352                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1353                         //check if corresp. zsl already exists in the stored metadata list
1354                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1355                               m != mStoredMetadataList.begin(); m++) {
1356                            if (m->frame_number == frame_number) {
1357                               m->meta_buf = metadata_buf;
1358                               found_metadata = 1;
1359                               break;
1360                            }
1361                         }
1362                         if (!found_metadata) {
1363                            MetadataBufferInfo store_meta_info;
1364                            store_meta_info.meta_buf = metadata_buf;
1365                            store_meta_info.frame_number = frame_number;
1366                            mStoredMetadataList.push_back(store_meta_info);
1367                            found_metadata = 1;
1368                         }
1369                      }
1370                   }
1371                   if (!found_metadata) {
1372                       if (!i->input_buffer_present && i->blob_request) {
1373                          //livesnapshot or fallback non-zsl snapshot case
1374                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1375                                j != i->buffers.end(); j++){
1376                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1377                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1378                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1379                                 break;
1380                              }
1381                         }
1382                       } else {
1383                            //return the metadata immediately
1384                            mMetadataChannel->bufDone(metadata_buf);
1385                            free(metadata_buf);
1386                       }
1387                   }
1388               } else if (!mIsZslMode && i->blob_request) {
1389                   //If it is a blob request then send the metadata to the picture channel
1390                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1391               } else {
1392                   // Return metadata buffer
1393                   mMetadataChannel->bufDone(metadata_buf);
1394                   free(metadata_buf);
1395               }
1396
1397            }
1398            if (!result.result) {
1399                ALOGE("%s: metadata is NULL", __func__);
1400            }
1401            result.frame_number = i->frame_number;
1402            result.num_output_buffers = 0;
1403            result.output_buffers = NULL;
1404            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1405                    j != i->buffers.end(); j++) {
1406                if (j->buffer) {
1407                    result.num_output_buffers++;
1408                }
1409            }
1410
1411            if (result.num_output_buffers > 0) {
1412                camera3_stream_buffer_t *result_buffers =
1413                    new camera3_stream_buffer_t[result.num_output_buffers];
1414                if (!result_buffers) {
1415                    ALOGE("%s: Fatal error: out of memory", __func__);
1416                }
1417                size_t result_buffers_idx = 0;
1418                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1419                        j != i->buffers.end(); j++) {
1420                    if (j->buffer) {
1421                        result_buffers[result_buffers_idx++] = *(j->buffer);
1422                        free(j->buffer);
1423                        j->buffer = NULL;
1424                        mPendingBuffersMap.editValueFor(j->stream)--;
1425                    }
1426                }
1427                result.output_buffers = result_buffers;
1428
1429                mCallbackOps->process_capture_result(mCallbackOps, &result);
1430                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1431                        __func__, result.frame_number, current_capture_time);
1432                free_camera_metadata((camera_metadata_t *)result.result);
1433                delete[] result_buffers;
1434            } else {
1435                mCallbackOps->process_capture_result(mCallbackOps, &result);
1436                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1437                        __func__, result.frame_number, current_capture_time);
1438                free_camera_metadata((camera_metadata_t *)result.result);
1439            }
1440            // erase the element from the list
1441            i = mPendingRequestsList.erase(i);
1442        }
1443
1444
1445done_metadata:
1446        bool max_buffers_dequeued = false;
1447        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1448            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1449            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1450            if (queued_buffers == stream->max_buffers) {
1451                max_buffers_dequeued = true;
1452                break;
1453            }
1454        }
1455        if (!max_buffers_dequeued && !pending_requests) {
1456            // Unblock process_capture_request
1457            mPendingRequest = 0;
1458            pthread_cond_signal(&mRequestCond);
1459        }
1460    } else {
1461        // If the frame number doesn't exist in the pending request list,
1462        // directly send the buffer to the frameworks, and update pending buffers map
1463        // Otherwise, book-keep the buffer.
1464        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1465        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1466            i++;
1467        }
1468        if (i == mPendingRequestsList.end()) {
1469            // Verify all pending requests frame_numbers are greater
1470            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1471                    j != mPendingRequestsList.end(); j++) {
1472                if (j->frame_number < frame_number) {
1473                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1474                            __func__, j->frame_number, frame_number);
1475                }
1476            }
1477            camera3_capture_result_t result;
1478            result.result = NULL;
1479            result.frame_number = frame_number;
1480            result.num_output_buffers = 1;
1481            result.output_buffers = buffer;
1482            ALOGV("%s: result frame_number = %d, buffer = %p",
1483                    __func__, frame_number, buffer);
1484            mPendingBuffersMap.editValueFor(buffer->stream)--;
1485            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1486                int found = 0;
1487                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1488                      k != mStoredMetadataList.end(); k++) {
1489                    if (k->frame_number == frame_number) {
1490                        k->zsl_buf_hdl = buffer->buffer;
1491                        found = 1;
1492                        break;
1493                    }
1494                }
1495                if (!found) {
1496                   MetadataBufferInfo meta_info;
1497                   meta_info.frame_number = frame_number;
1498                   meta_info.zsl_buf_hdl = buffer->buffer;
1499                   mStoredMetadataList.push_back(meta_info);
1500                }
1501            }
1502            mCallbackOps->process_capture_result(mCallbackOps, &result);
1503        } else {
1504            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1505                    j != i->buffers.end(); j++) {
1506                if (j->stream == buffer->stream) {
1507                    if (j->buffer != NULL) {
1508                        ALOGE("%s: Error: buffer is already set", __func__);
1509                    } else {
1510                        j->buffer = (camera3_stream_buffer_t *)malloc(
1511                                sizeof(camera3_stream_buffer_t));
1512                        *(j->buffer) = *buffer;
1513                        ALOGV("%s: cache buffer %p at result frame_number %d",
1514                                __func__, buffer, frame_number);
1515                    }
1516                }
1517            }
1518        }
1519    }
1520    pthread_mutex_unlock(&mMutex);
1521    return;
1522}
1523
1524/*===========================================================================
1525 * FUNCTION   : translateCbMetadataToResultMetadata
1526 *
1527 * DESCRIPTION:
1528 *
1529 * PARAMETERS :
1530 *   @metadata : metadata information from callback
1531 *
1532 * RETURN     : camera_metadata_t*
1533 *              metadata in a format specified by fwk
1534 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    // Builds the framework result metadata for one capture: every backend
    // CAM_INTF_* entry is fetched via POINTER_OF() and copied into the
    // corresponding ANDROID_* tag. Ownership of the returned buffer is
    // transferred to the caller via camMetadata.release().
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Timestamp and request id come from the caller, not the backend buffer.
    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    // NOTE(review): variable-length arrays sized by numFaces (GCC extension);
    // zero-length when no faces are detected, in which case the guarded
    // update() block below is skipped entirely.
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];  // 4 ints per face: xmin,ymin,xmax,ymax
    int32_t faceLandmarks[numFaces * 6];   // 6 ints per face: eyes + mouth centers
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        // weight = -1: rectangles carry no weight, only the 4 coordinates
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    // Region tags are 5 ints: (xmin, ymin, xmax, ymax, weight)
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);

    uint8_t *ae_state =
            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);

    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    // Crop region is flattened as (left, top, width, height)
    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    // Exposure time and ISO are also cached in mMetadataResponse for use
    // outside this translation (consumer not visible here — confirm).
    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    mMetadataResponse.exposure_time = *sensorExpTime;
    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

    int64_t  *sensorFameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);

    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
    mMetadataResponse.iso_speed = *sensorSensitivity;
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    // Face detect mode needs translation from HAL enum to framework enum
    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
        *faceDetectMode);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    // Always publishes the maximum map size, not the active one — see the
    // "check size" TODO above.
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    // Lens shading map: 4 floats per grid cell, grid size taken from the
    // static capability table for this camera.
    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                       (float*)lensShadingMap->lens_shading,
                       4*map_width*map_height);

    //Populate CAM_INTF_META_TONEMAP_CURVES
    /* ch0 = G, ch 1 = B, ch 2 = R*/
    // Each curve is tonemap_points_cnt (in, out) float pairs, hence * 2.
    cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
        POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
    camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                       (float*)tonemap->curves[0].tonemap_points,
                       tonemap->tonemap_points_cnt * 2);

    camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                       (float*)tonemap->curves[1].tonemap_points,
                       tonemap->tonemap_points_cnt * 2);

    camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                       (float*)tonemap->curves[2].tonemap_points,
                       tonemap->tonemap_points_cnt * 2);

    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);

    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                       predColorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);

    uint8_t *blackLevelLock = (uint8_t*)
        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);

    uint8_t *sceneFlicker = (uint8_t*)
        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);


    // Detach the buffer from camMetadata; caller takes ownership.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
1798
1799/*===========================================================================
1800 * FUNCTION   : convertToRegions
1801 *
1802 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1803 *
1804 * PARAMETERS :
1805 *   @rect   : cam_rect_t struct to convert
1806 *   @region : int32_t destination array
1807 *   @weight : if we are converting from cam_area_t, weight is valid
1808 *             else weight = -1
1809 *
1810 *==========================================================================*/
1811void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1812    region[0] = rect.left;
1813    region[1] = rect.top;
1814    region[2] = rect.left + rect.width;
1815    region[3] = rect.top + rect.height;
1816    if (weight > -1) {
1817        region[4] = weight;
1818    }
1819}
1820
1821/*===========================================================================
1822 * FUNCTION   : convertFromRegions
1823 *
1824 * DESCRIPTION: helper method to convert from array to cam_rect_t
1825 *
1826 * PARAMETERS :
1827 *   @rect   : cam_rect_t struct to convert
1828 *   @region : int32_t destination array
1829 *   @weight : if we are converting from cam_area_t, weight is valid
1830 *             else weight = -1
1831 *
1832 *==========================================================================*/
1833void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1834                                                   const camera_metadata_t *settings,
1835                                                   uint32_t tag){
1836    CameraMetadata frame_settings;
1837    frame_settings = settings;
1838    int32_t x_min = frame_settings.find(tag).data.i32[0];
1839    int32_t y_min = frame_settings.find(tag).data.i32[1];
1840    int32_t x_max = frame_settings.find(tag).data.i32[2];
1841    int32_t y_max = frame_settings.find(tag).data.i32[3];
1842    roi->weight = frame_settings.find(tag).data.i32[4];
1843    roi->rect.left = x_min;
1844    roi->rect.top = y_min;
1845    roi->rect.width = x_max - x_min;
1846    roi->rect.height = y_max - y_min;
1847}
1848
1849/*===========================================================================
1850 * FUNCTION   : resetIfNeededROI
1851 *
1852 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1853 *              crop region
1854 *
1855 * PARAMETERS :
1856 *   @roi       : cam_area_t struct to resize
1857 *   @scalerCropRegion : cam_crop_region_t region to compare against
1858 *
1859 *
1860 *==========================================================================*/
1861bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1862                                                 const cam_crop_region_t* scalerCropRegion)
1863{
1864    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1865    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1866    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1867    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1868    if ((roi_x_max < scalerCropRegion->left) ||
1869        (roi_y_max < scalerCropRegion->top)  ||
1870        (roi->rect.left > crop_x_max) ||
1871        (roi->rect.top > crop_y_max)){
1872        return false;
1873    }
1874    if (roi->rect.left < scalerCropRegion->left) {
1875        roi->rect.left = scalerCropRegion->left;
1876    }
1877    if (roi->rect.top < scalerCropRegion->top) {
1878        roi->rect.top = scalerCropRegion->top;
1879    }
1880    if (roi_x_max > crop_x_max) {
1881        roi_x_max = crop_x_max;
1882    }
1883    if (roi_y_max > crop_y_max) {
1884        roi_y_max = crop_y_max;
1885    }
1886    roi->rect.width = roi_x_max - roi->rect.left;
1887    roi->rect.height = roi_y_max - roi->rect.top;
1888    return true;
1889}
1890
1891/*===========================================================================
1892 * FUNCTION   : convertLandmarks
1893 *
1894 * DESCRIPTION: helper method to extract the landmarks from face detection info
1895 *
1896 * PARAMETERS :
1897 *   @face   : cam_rect_t struct to convert
1898 *   @landmarks : int32_t destination array
1899 *
1900 *
1901 *==========================================================================*/
1902void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1903{
1904    landmarks[0] = face.left_eye_center.x;
1905    landmarks[1] = face.left_eye_center.y;
1906    landmarks[2] = face.right_eye_center.y;
1907    landmarks[3] = face.right_eye_center.y;
1908    landmarks[4] = face.mouth_center.x;
1909    landmarks[5] = face.mouth_center.y;
1910}
1911
1912#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1913/*===========================================================================
1914 * FUNCTION   : initCapabilities
1915 *
1916 * DESCRIPTION: initialize camera capabilities in static data struct
1917 *
1918 * PARAMETERS :
1919 *   @cameraId  : camera Id
1920 *
1921 * RETURN     : int32_t type of status
1922 *              NO_ERROR  -- success
1923 *              none-zero failure code
1924 *==========================================================================*/
1925int QCamera3HardwareInterface::initCapabilities(int cameraId)
1926{
1927    int rc = 0;
1928    mm_camera_vtbl_t *cameraHandle = NULL;
1929    QCamera3HeapMemory *capabilityHeap = NULL;
1930
1931    cameraHandle = camera_open(cameraId);
1932    if (!cameraHandle) {
1933        ALOGE("%s: camera_open failed", __func__);
1934        rc = -1;
1935        goto open_failed;
1936    }
1937
1938    capabilityHeap = new QCamera3HeapMemory();
1939    if (capabilityHeap == NULL) {
1940        ALOGE("%s: creation of capabilityHeap failed", __func__);
1941        goto heap_creation_failed;
1942    }
1943    /* Allocate memory for capability buffer */
1944    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1945    if(rc != OK) {
1946        ALOGE("%s: No memory for cappability", __func__);
1947        goto allocate_failed;
1948    }
1949
1950    /* Map memory for capability buffer */
1951    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1952    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1953                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1954                                capabilityHeap->getFd(0),
1955                                sizeof(cam_capability_t));
1956    if(rc < 0) {
1957        ALOGE("%s: failed to map capability buffer", __func__);
1958        goto map_failed;
1959    }
1960
1961    /* Query Capability */
1962    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1963    if(rc < 0) {
1964        ALOGE("%s: failed to query capability",__func__);
1965        goto query_failed;
1966    }
1967    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1968    if (!gCamCapability[cameraId]) {
1969        ALOGE("%s: out of memory", __func__);
1970        goto query_failed;
1971    }
1972    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1973                                        sizeof(cam_capability_t));
1974    rc = 0;
1975
1976query_failed:
1977    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1978                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1979map_failed:
1980    capabilityHeap->deallocate();
1981allocate_failed:
1982    delete capabilityHeap;
1983heap_creation_failed:
1984    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1985    cameraHandle = NULL;
1986open_failed:
1987    return rc;
1988}
1989
1990/*===========================================================================
1991 * FUNCTION   : initParameters
1992 *
1993 * DESCRIPTION: initialize camera parameters
1994 *
1995 * PARAMETERS :
1996 *
1997 * RETURN     : int32_t type of status
1998 *              NO_ERROR  -- success
1999 *              none-zero failure code
2000 *==========================================================================*/
2001int QCamera3HardwareInterface::initParameters()
2002{
2003    int rc = 0;
2004
2005    //Allocate Set Param Buffer
2006    mParamHeap = new QCamera3HeapMemory();
2007    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2008    if(rc != OK) {
2009        rc = NO_MEMORY;
2010        ALOGE("Failed to allocate SETPARM Heap memory");
2011        delete mParamHeap;
2012        mParamHeap = NULL;
2013        return rc;
2014    }
2015
2016    //Map memory for parameters buffer
2017    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2018            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2019            mParamHeap->getFd(0),
2020            sizeof(parm_buffer_t));
2021    if(rc < 0) {
2022        ALOGE("%s:failed to map SETPARM buffer",__func__);
2023        rc = FAILED_TRANSACTION;
2024        mParamHeap->deallocate();
2025        delete mParamHeap;
2026        mParamHeap = NULL;
2027        return rc;
2028    }
2029
2030    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2031    return rc;
2032}
2033
2034/*===========================================================================
2035 * FUNCTION   : deinitParameters
2036 *
2037 * DESCRIPTION: de-initialize camera parameters
2038 *
2039 * PARAMETERS :
2040 *
2041 * RETURN     : NONE
2042 *==========================================================================*/
2043void QCamera3HardwareInterface::deinitParameters()
2044{
2045    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2046            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2047
2048    mParamHeap->deallocate();
2049    delete mParamHeap;
2050    mParamHeap = NULL;
2051
2052    mParameters = NULL;
2053}
2054
2055/*===========================================================================
2056 * FUNCTION   : calcMaxJpegSize
2057 *
2058 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2059 *
2060 * PARAMETERS :
2061 *
2062 * RETURN     : max_jpeg_size
2063 *==========================================================================*/
2064int QCamera3HardwareInterface::calcMaxJpegSize()
2065{
2066    int32_t max_jpeg_size = 0;
2067    int temp_width, temp_height;
2068    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2069        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2070        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2071        if (temp_width * temp_height > max_jpeg_size ) {
2072            max_jpeg_size = temp_width * temp_height;
2073        }
2074    }
2075    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2076    return max_jpeg_size;
2077}
2078
2079/*===========================================================================
2080 * FUNCTION   : initStaticMetadata
2081 *
2082 * DESCRIPTION: initialize the static metadata
2083 *
2084 * PARAMETERS :
2085 *   @cameraId  : camera Id
2086 *
2087 * RETURN     : int32_t type of status
2088 *              0  -- success
2089 *              non-zero failure code
2090 *==========================================================================*/
2091int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2092{
2093    int rc = 0;
2094    CameraMetadata staticInfo;
2095
2096    /* android.info: hardware level */
2097    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2098    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2099        &supportedHardwareLevel, 1);
2100
2101    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2102    /*HAL 3 only*/
2103    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2104                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2105
2106    /*hard coded for now but this should come from sensor*/
2107    float min_focus_distance;
2108    if(facingBack){
2109        min_focus_distance = 10;
2110    } else {
2111        min_focus_distance = 0;
2112    }
2113    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2114                    &min_focus_distance, 1);
2115
2116    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2117                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2118
2119    /*should be using focal lengths but sensor doesn't provide that info now*/
2120    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2121                      &gCamCapability[cameraId]->focal_length,
2122                      1);
2123
2124    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2125                      gCamCapability[cameraId]->apertures,
2126                      gCamCapability[cameraId]->apertures_count);
2127
2128    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2129                gCamCapability[cameraId]->filter_densities,
2130                gCamCapability[cameraId]->filter_densities_count);
2131
2132
2133    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2134                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2135                      gCamCapability[cameraId]->optical_stab_modes_count);
2136
2137    staticInfo.update(ANDROID_LENS_POSITION,
2138                      gCamCapability[cameraId]->lens_position,
2139                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2140
2141    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2142                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2143    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2144                      lens_shading_map_size,
2145                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2146
2147    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2148                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2149    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2150            geo_correction_map_size,
2151            sizeof(geo_correction_map_size)/sizeof(int32_t));
2152
2153    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2154                       gCamCapability[cameraId]->geo_correction_map,
2155                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2156
2157    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2158            gCamCapability[cameraId]->sensor_physical_size, 2);
2159
2160    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2161            gCamCapability[cameraId]->exposure_time_range, 2);
2162
2163    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2164            &gCamCapability[cameraId]->max_frame_duration, 1);
2165
2166
2167    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2168                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2169
2170    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2171                                               gCamCapability[cameraId]->pixel_array_size.height};
2172    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2173                      pixel_array_size, 2);
2174
2175    int32_t active_array_size[] = {0, 0,
2176                                                gCamCapability[cameraId]->active_array_size.width,
2177                                                gCamCapability[cameraId]->active_array_size.height};
2178    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2179                      active_array_size, 4);
2180
2181    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2182            &gCamCapability[cameraId]->white_level, 1);
2183
2184    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2185            gCamCapability[cameraId]->black_level_pattern, 4);
2186
2187    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2188                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2189
2190    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2191                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2192
2193    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2194                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2195
2196    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2197                      &gCamCapability[cameraId]->histogram_size, 1);
2198
2199    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2200            &gCamCapability[cameraId]->max_histogram_count, 1);
2201
2202    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2203                                                gCamCapability[cameraId]->sharpness_map_size.height};
2204
2205    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2206            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2207
2208    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2209            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2210
2211
2212    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2213                      &gCamCapability[cameraId]->raw_min_duration,
2214                       1);
2215
2216    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2217                                                HAL_PIXEL_FORMAT_BLOB};
2218    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2219    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2220                      scalar_formats,
2221                      scalar_formats_count);
2222
2223    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2224    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2225              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2226              available_processed_sizes);
2227    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2228                available_processed_sizes,
2229                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2230
2231    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2232                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2233                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2234
2235    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2236    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2237                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2238                 available_fps_ranges);
2239    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2240            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2241
2242    camera_metadata_rational exposureCompensationStep = {
2243            gCamCapability[cameraId]->exp_compensation_step.numerator,
2244            gCamCapability[cameraId]->exp_compensation_step.denominator};
2245    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2246                      &exposureCompensationStep, 1);
2247
2248    /*TO DO*/
2249    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2250    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2251                      availableVstabModes, sizeof(availableVstabModes));
2252
2253    /*HAL 1 and HAL 3 common*/
2254    float maxZoom = 4;
2255    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2256            &maxZoom, 1);
2257
2258    int32_t max3aRegions = 1;
2259    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2260            &max3aRegions, 1);
2261
2262    uint8_t availableFaceDetectModes[] = {
2263            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2264            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2265    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2266                      availableFaceDetectModes,
2267                      sizeof(availableFaceDetectModes));
2268
2269    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2270                                       gCamCapability[cameraId]->raw_dim.height};
2271    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2272                      raw_size,
2273                      sizeof(raw_size)/sizeof(uint32_t));
2274
2275    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2276                                                        gCamCapability[cameraId]->exposure_compensation_max};
2277    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2278            exposureCompensationRange,
2279            sizeof(exposureCompensationRange)/sizeof(int32_t));
2280
2281    uint8_t lensFacing = (facingBack) ?
2282            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2283    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2284
2285    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2286                available_processed_sizes,
2287                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2288
2289    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2290                      available_thumbnail_sizes,
2291                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2292
2293    int32_t max_jpeg_size = 0;
2294    int temp_width, temp_height;
2295    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2296        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2297        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2298        if (temp_width * temp_height > max_jpeg_size ) {
2299            max_jpeg_size = temp_width * temp_height;
2300        }
2301    }
2302    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2303    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2304                      &max_jpeg_size, 1);
2305
2306    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2307    int32_t size = 0;
2308    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2309        int val = lookupFwkName(EFFECT_MODES_MAP,
2310                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2311                                   gCamCapability[cameraId]->supported_effects[i]);
2312        if (val != NAME_NOT_FOUND) {
2313            avail_effects[size] = (uint8_t)val;
2314            size++;
2315        }
2316    }
2317    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2318                      avail_effects,
2319                      size);
2320
2321    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2322    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2323    int32_t supported_scene_modes_cnt = 0;
2324    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2325        int val = lookupFwkName(SCENE_MODES_MAP,
2326                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2327                                gCamCapability[cameraId]->supported_scene_modes[i]);
2328        if (val != NAME_NOT_FOUND) {
2329            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2330            supported_indexes[supported_scene_modes_cnt] = i;
2331            supported_scene_modes_cnt++;
2332        }
2333    }
2334
2335    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2336                      avail_scene_modes,
2337                      supported_scene_modes_cnt);
2338
2339    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2340    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2341                      supported_scene_modes_cnt,
2342                      scene_mode_overrides,
2343                      supported_indexes,
2344                      cameraId);
2345    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2346                      scene_mode_overrides,
2347                      supported_scene_modes_cnt*3);
2348
2349    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2350    size = 0;
2351    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2352        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2353                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2354                                 gCamCapability[cameraId]->supported_antibandings[i]);
2355        if (val != NAME_NOT_FOUND) {
2356            avail_antibanding_modes[size] = (uint8_t)val;
2357            size++;
2358        }
2359
2360    }
2361    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2362                      avail_antibanding_modes,
2363                      size);
2364
2365    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2366    size = 0;
2367    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2368        int val = lookupFwkName(FOCUS_MODES_MAP,
2369                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2370                                gCamCapability[cameraId]->supported_focus_modes[i]);
2371        if (val != NAME_NOT_FOUND) {
2372            avail_af_modes[size] = (uint8_t)val;
2373            size++;
2374        }
2375    }
2376    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2377                      avail_af_modes,
2378                      size);
2379
2380    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2381    size = 0;
2382    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2383        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2384                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2385                                    gCamCapability[cameraId]->supported_white_balances[i]);
2386        if (val != NAME_NOT_FOUND) {
2387            avail_awb_modes[size] = (uint8_t)val;
2388            size++;
2389        }
2390    }
2391    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2392                      avail_awb_modes,
2393                      size);
2394
2395    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2396    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2397      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2398
2399    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2400            available_flash_levels,
2401            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2402
2403
2404    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2405    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2406            &flashAvailable, 1);
2407
2408    uint8_t avail_ae_modes[5];
2409    size = 0;
2410    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2411        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2412        size++;
2413    }
2414    if (flashAvailable) {
2415        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2416        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2417        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2418    }
2419    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2420                      avail_ae_modes,
2421                      size);
2422
2423    int32_t sensitivity_range[2];
2424    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2425    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2426    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2427                      sensitivity_range,
2428                      sizeof(sensitivity_range) / sizeof(int32_t));
2429
2430    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2431                      &gCamCapability[cameraId]->max_analog_sensitivity,
2432                      1);
2433
2434    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2435                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2436                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2437
2438    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2439    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2440                      &sensor_orientation,
2441                      1);
2442
2443    int32_t max_output_streams[3] = {1, 3, 1};
2444    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2445                      max_output_streams,
2446                      3);
2447
2448    gStaticMetadata[cameraId] = staticInfo.release();
2449    return rc;
2450}
2451
2452/*===========================================================================
2453 * FUNCTION   : makeTable
2454 *
2455 * DESCRIPTION: make a table of sizes
2456 *
2457 * PARAMETERS :
2458 *
2459 *
2460 *==========================================================================*/
2461void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2462                                          int32_t* sizeTable)
2463{
2464    int j = 0;
2465    for (int i = 0; i < size; i++) {
2466        sizeTable[j] = dimTable[i].width;
2467        sizeTable[j+1] = dimTable[i].height;
2468        j+=2;
2469    }
2470}
2471
2472/*===========================================================================
2473 * FUNCTION   : makeFPSTable
2474 *
2475 * DESCRIPTION: make a table of fps ranges
2476 *
2477 * PARAMETERS :
2478 *
2479 *==========================================================================*/
2480void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2481                                          int32_t* fpsRangesTable)
2482{
2483    int j = 0;
2484    for (int i = 0; i < size; i++) {
2485        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2486        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2487        j+=2;
2488    }
2489}
2490
2491/*===========================================================================
2492 * FUNCTION   : makeOverridesList
2493 *
2494 * DESCRIPTION: make a list of scene mode overrides
2495 *
2496 * PARAMETERS :
2497 *
2498 *
2499 *==========================================================================*/
2500void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2501                                                  uint8_t size, uint8_t* overridesList,
2502                                                  uint8_t* supported_indexes,
2503                                                  int camera_id)
2504{
2505    /*daemon will give a list of overrides for all scene modes.
2506      However we should send the fwk only the overrides for the scene modes
2507      supported by the framework*/
2508    int j = 0, index = 0, supt = 0;
2509    uint8_t focus_override;
2510    for (int i = 0; i < size; i++) {
2511        supt = 0;
2512        index = supported_indexes[i];
2513        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2514        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2515                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2516                                                    overridesTable[index].awb_mode);
2517        focus_override = (uint8_t)overridesTable[index].af_mode;
2518        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2519           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2520              supt = 1;
2521              break;
2522           }
2523        }
2524        if (supt) {
2525           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2526                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2527                                              focus_override);
2528        } else {
2529           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2530        }
2531        j+=3;
2532    }
2533}
2534
2535/*===========================================================================
2536 * FUNCTION   : getPreviewHalPixelFormat
2537 *
2538 * DESCRIPTION: convert the format to type recognized by framework
2539 *
2540 * PARAMETERS : format : the format from backend
2541 *
2542 ** RETURN    : format recognized by framework
2543 *
2544 *==========================================================================*/
2545int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2546{
2547    int32_t halPixelFormat;
2548
2549    switch (format) {
2550    case CAM_FORMAT_YUV_420_NV12:
2551        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2552        break;
2553    case CAM_FORMAT_YUV_420_NV21:
2554        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2555        break;
2556    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2557        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2558        break;
2559    case CAM_FORMAT_YUV_420_YV12:
2560        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2561        break;
2562    case CAM_FORMAT_YUV_422_NV16:
2563    case CAM_FORMAT_YUV_422_NV61:
2564    default:
2565        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2566        break;
2567    }
2568    return halPixelFormat;
2569}
2570
2571/*===========================================================================
2572 * FUNCTION   : getSensorSensitivity
2573 *
2574 * DESCRIPTION: convert iso_mode to an integer value
2575 *
2576 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2577 *
2578 ** RETURN    : sensitivity supported by sensor
2579 *
2580 *==========================================================================*/
2581int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2582{
2583    int32_t sensitivity;
2584
2585    switch (iso_mode) {
2586    case CAM_ISO_MODE_100:
2587        sensitivity = 100;
2588        break;
2589    case CAM_ISO_MODE_200:
2590        sensitivity = 200;
2591        break;
2592    case CAM_ISO_MODE_400:
2593        sensitivity = 400;
2594        break;
2595    case CAM_ISO_MODE_800:
2596        sensitivity = 800;
2597        break;
2598    case CAM_ISO_MODE_1600:
2599        sensitivity = 1600;
2600        break;
2601    default:
2602        sensitivity = -1;
2603        break;
2604    }
2605    return sensitivity;
2606}
2607
2608
2609/*===========================================================================
2610 * FUNCTION   : AddSetParmEntryToBatch
2611 *
2612 * DESCRIPTION: add set parameter entry into batch
2613 *
2614 * PARAMETERS :
2615 *   @p_table     : ptr to parameter buffer
2616 *   @paramType   : parameter type
2617 *   @paramLength : length of parameter value
2618 *   @paramValue  : ptr to parameter value
2619 *
2620 * RETURN     : int32_t type of status
2621 *              NO_ERROR  -- success
2622 *              none-zero failure code
2623 *==========================================================================*/
2624int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2625                                                          cam_intf_parm_type_t paramType,
2626                                                          uint32_t paramLength,
2627                                                          void *paramValue)
2628{
2629    int position = paramType;
2630    int current, next;
2631
2632    /*************************************************************************
2633    *                 Code to take care of linking next flags                *
2634    *************************************************************************/
2635    current = GET_FIRST_PARAM_ID(p_table);
2636    if (position == current){
2637        //DO NOTHING
2638    } else if (position < current){
2639        SET_NEXT_PARAM_ID(position, p_table, current);
2640        SET_FIRST_PARAM_ID(p_table, position);
2641    } else {
2642        /* Search for the position in the linked list where we need to slot in*/
2643        while (position > GET_NEXT_PARAM_ID(current, p_table))
2644            current = GET_NEXT_PARAM_ID(current, p_table);
2645
2646        /*If node already exists no need to alter linking*/
2647        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2648            next = GET_NEXT_PARAM_ID(current, p_table);
2649            SET_NEXT_PARAM_ID(current, p_table, position);
2650            SET_NEXT_PARAM_ID(position, p_table, next);
2651        }
2652    }
2653
2654    /*************************************************************************
2655    *                   Copy contents into entry                             *
2656    *************************************************************************/
2657
2658    if (paramLength > sizeof(parm_type_t)) {
2659        ALOGE("%s:Size of input larger than max entry size",__func__);
2660        return BAD_VALUE;
2661    }
2662    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2663    return NO_ERROR;
2664}
2665
2666/*===========================================================================
2667 * FUNCTION   : lookupFwkName
2668 *
2669 * DESCRIPTION: In case the enum is not same in fwk and backend
2670 *              make sure the parameter is correctly propogated
2671 *
2672 * PARAMETERS  :
2673 *   @arr      : map between the two enums
2674 *   @len      : len of the map
2675 *   @hal_name : name of the hal_parm to map
2676 *
2677 * RETURN     : int type of status
2678 *              fwk_name  -- success
2679 *              none-zero failure code
2680 *==========================================================================*/
2681int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2682                                             int len, int hal_name)
2683{
2684
2685    for (int i = 0; i < len; i++) {
2686        if (arr[i].hal_name == hal_name)
2687            return arr[i].fwk_name;
2688    }
2689
2690    /* Not able to find matching framework type is not necessarily
2691     * an error case. This happens when mm-camera supports more attributes
2692     * than the frameworks do */
2693    ALOGD("%s: Cannot find matching framework type", __func__);
2694    return NAME_NOT_FOUND;
2695}
2696
2697/*===========================================================================
2698 * FUNCTION   : lookupHalName
2699 *
2700 * DESCRIPTION: In case the enum is not same in fwk and backend
2701 *              make sure the parameter is correctly propogated
2702 *
2703 * PARAMETERS  :
2704 *   @arr      : map between the two enums
2705 *   @len      : len of the map
2706 *   @fwk_name : name of the hal_parm to map
2707 *
2708 * RETURN     : int32_t type of status
2709 *              hal_name  -- success
2710 *              none-zero failure code
2711 *==========================================================================*/
2712int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2713                                             int len, int fwk_name)
2714{
2715    for (int i = 0; i < len; i++) {
2716       if (arr[i].fwk_name == fwk_name)
2717           return arr[i].hal_name;
2718    }
2719    ALOGE("%s: Cannot find matching hal type", __func__);
2720    return NAME_NOT_FOUND;
2721}
2722
2723/*===========================================================================
2724 * FUNCTION   : getCapabilities
2725 *
2726 * DESCRIPTION: query camera capabilities
2727 *
2728 * PARAMETERS :
2729 *   @cameraId  : camera Id
2730 *   @info      : camera info struct to be filled in with camera capabilities
2731 *
2732 * RETURN     : int32_t type of status
2733 *              NO_ERROR  -- success
2734 *              none-zero failure code
2735 *==========================================================================*/
2736int QCamera3HardwareInterface::getCamInfo(int cameraId,
2737                                    struct camera_info *info)
2738{
2739    int rc = 0;
2740
2741    if (NULL == gCamCapability[cameraId]) {
2742        rc = initCapabilities(cameraId);
2743        if (rc < 0) {
2744            //pthread_mutex_unlock(&g_camlock);
2745            return rc;
2746        }
2747    }
2748
2749    if (NULL == gStaticMetadata[cameraId]) {
2750        rc = initStaticMetadata(cameraId);
2751        if (rc < 0) {
2752            return rc;
2753        }
2754    }
2755
2756    switch(gCamCapability[cameraId]->position) {
2757    case CAM_POSITION_BACK:
2758        info->facing = CAMERA_FACING_BACK;
2759        break;
2760
2761    case CAM_POSITION_FRONT:
2762        info->facing = CAMERA_FACING_FRONT;
2763        break;
2764
2765    default:
2766        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2767        rc = -1;
2768        break;
2769    }
2770
2771
2772    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2773    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2774    info->static_camera_characteristics = gStaticMetadata[cameraId];
2775
2776    return rc;
2777}
2778
2779/*===========================================================================
2780 * FUNCTION   : translateMetadata
2781 *
2782 * DESCRIPTION: translate the metadata into camera_metadata_t
2783 *
2784 * PARAMETERS : type of the request
2785 *
2786 *
2787 * RETURN     : success: camera_metadata_t*
2788 *              failure: NULL
2789 *
2790 *==========================================================================*/
2791camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2792{
2793    pthread_mutex_lock(&mMutex);
2794
2795    if (mDefaultMetadata[type] != NULL) {
2796        pthread_mutex_unlock(&mMutex);
2797        return mDefaultMetadata[type];
2798    }
2799    //first time we are handling this request
2800    //fill up the metadata structure using the wrapper class
2801    CameraMetadata settings;
2802    //translate from cam_capability_t to camera_metadata_tag_t
2803    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2804    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2805
2806    /*control*/
2807
2808    uint8_t controlIntent = 0;
2809    switch (type) {
2810      case CAMERA3_TEMPLATE_PREVIEW:
2811        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2812        break;
2813      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2814        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2815        break;
2816      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2817        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2818        break;
2819      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2820        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2821        break;
2822      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2823        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2824        break;
2825      default:
2826        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2827        break;
2828    }
2829    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2830
2831    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2832            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2833
2834    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2835    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2836
2837    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2838    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2839
2840    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2841    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2842
2843    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2844    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2845
2846    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2847    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2848
2849    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2850    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2851
2852    static uint8_t focusMode;
2853    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2854        ALOGE("%s: Setting focus mode to auto", __func__);
2855        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2856    } else {
2857        ALOGE("%s: Setting focus mode to off", __func__);
2858        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2859    }
2860    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2861
2862    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2863    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2864
2865    /*flash*/
2866    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2867    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2868
2869    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2870    settings.update(ANDROID_FLASH_FIRING_POWER,
2871            &flashFiringLevel, 1);
2872
2873    /* lens */
2874    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2875    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2876
2877    if (gCamCapability[mCameraId]->filter_densities_count) {
2878        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2879        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2880                        gCamCapability[mCameraId]->filter_densities_count);
2881    }
2882
2883    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2884    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2885
2886    /* frame duration */
2887    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2888    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2889
2890    /* sensitivity */
2891    static const int32_t default_sensitivity = 100;
2892    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2893
2894    /*edge mode*/
2895    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2896    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
2897
2898    /*noise reduction mode*/
2899    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
2900    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
2901
2902    /*color correction mode*/
2903    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
2904    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
2905
2906    /*transform matrix mode*/
2907    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
2908    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
2909
2910    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
2911    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
2912
2913    mDefaultMetadata[type] = settings.release();
2914
2915    pthread_mutex_unlock(&mMutex);
2916    return mDefaultMetadata[type];
2917}
2918
2919/*===========================================================================
2920 * FUNCTION   : setFrameParameters
2921 *
2922 * DESCRIPTION: set parameters per frame as requested in the metadata from
2923 *              framework
2924 *
2925 * PARAMETERS :
2926 *   @request   : request that needs to be serviced
2927 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2928 *
2929 * RETURN     : success: NO_ERROR
2930 *              failure:
2931 *==========================================================================*/
2932int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2933                    uint32_t streamTypeMask)
2934{
2935    /*translate from camera_metadata_t type to parm_type_t*/
2936    int rc = 0;
2937    if (request->settings == NULL && mFirstRequest) {
2938        /*settings cannot be null for the first request*/
2939        return BAD_VALUE;
2940    }
2941
2942    int32_t hal_version = CAM_HAL_V3;
2943
2944    memset(mParameters, 0, sizeof(parm_buffer_t));
2945    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2946    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2947                sizeof(hal_version), &hal_version);
2948    if (rc < 0) {
2949        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2950        return BAD_VALUE;
2951    }
2952
2953    /*we need to update the frame number in the parameters*/
2954    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2955                                sizeof(request->frame_number), &(request->frame_number));
2956    if (rc < 0) {
2957        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2958        return BAD_VALUE;
2959    }
2960
2961    /* Update stream id mask where buffers are requested */
2962    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2963                                sizeof(streamTypeMask), &streamTypeMask);
2964    if (rc < 0) {
2965        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2966        return BAD_VALUE;
2967    }
2968
2969    if(request->settings != NULL){
2970        rc = translateMetadataToParameters(request);
2971    }
2972    /*set the parameters to backend*/
2973    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2974    return rc;
2975}
2976
2977/*===========================================================================
2978 * FUNCTION   : translateMetadataToParameters
2979 *
2980 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2981 *
2982 *
2983 * PARAMETERS :
2984 *   @request  : request sent from framework
2985 *
2986 *
2987 * RETURN     : success: NO_ERROR
 *              failure: error code returned by AddSetParmEntryToBatch
2989 *==========================================================================*/
2990int QCamera3HardwareInterface::translateMetadataToParameters
2991                                  (const camera3_capture_request_t *request)
2992{
2993    int rc = 0;
2994    CameraMetadata frame_settings;
2995    frame_settings = request->settings;
2996
2997    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2998        int32_t antibandingMode =
2999            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3000        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3001                sizeof(antibandingMode), &antibandingMode);
3002    }
3003
3004    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3005        int32_t expCompensation = frame_settings.find(
3006            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3007        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3008            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3009        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3010            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3011        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3012          sizeof(expCompensation), &expCompensation);
3013    }
3014
3015    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3016        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3017        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3018                sizeof(aeLock), &aeLock);
3019    }
3020    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3021        cam_fps_range_t fps_range;
3022        fps_range.min_fps =
3023            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3024        fps_range.max_fps =
3025            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3026        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3027                sizeof(fps_range), &fps_range);
3028    }
3029
3030    float focalDistance = -1.0;
3031    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3032        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3033        rc = AddSetParmEntryToBatch(mParameters,
3034                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3035                sizeof(focalDistance), &focalDistance);
3036    }
3037
3038    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3039        uint8_t fwk_focusMode =
3040            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3041        uint8_t focusMode;
3042        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3043            focusMode = CAM_FOCUS_MODE_INFINITY;
3044        } else{
3045         focusMode = lookupHalName(FOCUS_MODES_MAP,
3046                                   sizeof(FOCUS_MODES_MAP),
3047                                   fwk_focusMode);
3048        }
3049        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3050                sizeof(focusMode), &focusMode);
3051    }
3052
3053    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3054        uint8_t awbLock =
3055            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3056        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3057                sizeof(awbLock), &awbLock);
3058    }
3059
3060    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3061        uint8_t fwk_whiteLevel =
3062            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3063        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3064                sizeof(WHITE_BALANCE_MODES_MAP),
3065                fwk_whiteLevel);
3066        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3067                sizeof(whiteLevel), &whiteLevel);
3068    }
3069
3070    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3071        uint8_t fwk_effectMode =
3072            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3073        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3074                sizeof(EFFECT_MODES_MAP),
3075                fwk_effectMode);
3076        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3077                sizeof(effectMode), &effectMode);
3078    }
3079
3080    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3081        uint8_t fwk_aeMode =
3082            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3083        uint8_t aeMode;
3084        int32_t redeye;
3085
3086        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3087            aeMode = CAM_AE_MODE_OFF;
3088        } else {
3089            aeMode = CAM_AE_MODE_ON;
3090        }
3091        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3092            redeye = 1;
3093        } else {
3094            redeye = 0;
3095        }
3096
3097        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3098                                          sizeof(AE_FLASH_MODE_MAP),
3099                                          fwk_aeMode);
3100        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3101                sizeof(aeMode), &aeMode);
3102        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3103                sizeof(flashMode), &flashMode);
3104        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3105                sizeof(redeye), &redeye);
3106    }
3107
3108    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3109        uint8_t colorCorrectMode =
3110            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3111        rc =
3112            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3113                    sizeof(colorCorrectMode), &colorCorrectMode);
3114    }
3115
3116    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3117        cam_color_correct_gains_t colorCorrectGains;
3118        for (int i = 0; i < 4; i++) {
3119            colorCorrectGains.gains[i] =
3120                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3121        }
3122        rc =
3123            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3124                    sizeof(colorCorrectGains), &colorCorrectGains);
3125    }
3126
3127    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3128        cam_color_correct_matrix_t colorCorrectTransform;
3129        cam_rational_type_t transform_elem;
3130        int num = 0;
3131        for (int i = 0; i < 3; i++) {
3132           for (int j = 0; j < 3; j++) {
3133              transform_elem.numerator =
3134                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3135              transform_elem.denominator =
3136                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3137              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3138              num++;
3139           }
3140        }
3141        rc =
3142            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3143                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3144    }
3145
3146    cam_trigger_t aecTrigger;
3147    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3148    aecTrigger.trigger_id = -1;
3149    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3150        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3151        aecTrigger.trigger =
3152            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3153        aecTrigger.trigger_id =
3154            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3155    }
3156    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3157                                sizeof(aecTrigger), &aecTrigger);
3158
3159    /*af_trigger must come with a trigger id*/
3160    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3161        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3162        cam_trigger_t af_trigger;
3163        af_trigger.trigger =
3164            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3165        af_trigger.trigger_id =
3166            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3167        rc = AddSetParmEntryToBatch(mParameters,
3168                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3169    }
3170
3171    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3172        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3173        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3174                sizeof(metaMode), &metaMode);
3175        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3176           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3177           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3178                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3179                                             fwk_sceneMode);
3180           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3181                sizeof(sceneMode), &sceneMode);
3182        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3183           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3184           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3185                sizeof(sceneMode), &sceneMode);
3186        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3187           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3188           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3189                sizeof(sceneMode), &sceneMode);
3190        }
3191    }
3192
3193    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3194        int32_t demosaic =
3195            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3196        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3197                sizeof(demosaic), &demosaic);
3198    }
3199
3200    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3201        cam_edge_application_t edge_application;
3202        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3203        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3204            edge_application.sharpness = 0;
3205        } else {
3206            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3207                int32_t edgeStrength =
3208                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3209                edge_application.sharpness = edgeStrength;
3210            } else {
3211                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3212            }
3213        }
3214        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3215                sizeof(edge_application), &edge_application);
3216    }
3217
3218    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3219        int32_t respectFlashMode = 1;
3220        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3221            uint8_t fwk_aeMode =
3222                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3223            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3224                respectFlashMode = 0;
3225                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3226                    __func__);
3227            }
3228        }
3229        if (respectFlashMode) {
3230            uint8_t flashMode =
3231                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3232            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3233                                          sizeof(FLASH_MODES_MAP),
3234                                          flashMode);
3235            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3236            // To check: CAM_INTF_META_FLASH_MODE usage
3237            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3238                          sizeof(flashMode), &flashMode);
3239        }
3240    }
3241
3242    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3243        uint8_t flashPower =
3244            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3245        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3246                sizeof(flashPower), &flashPower);
3247    }
3248
3249    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3250        int64_t flashFiringTime =
3251            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3252        rc = AddSetParmEntryToBatch(mParameters,
3253                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3254    }
3255
3256    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3257        uint8_t geometricMode =
3258            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3259        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3260                sizeof(geometricMode), &geometricMode);
3261    }
3262
3263    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3264        uint8_t geometricStrength =
3265            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3266        rc = AddSetParmEntryToBatch(mParameters,
3267                CAM_INTF_META_GEOMETRIC_STRENGTH,
3268                sizeof(geometricStrength), &geometricStrength);
3269    }
3270
3271    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3272        uint8_t hotPixelMode =
3273            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3274        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3275                sizeof(hotPixelMode), &hotPixelMode);
3276    }
3277
3278    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3279        float lensAperture =
3280            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3281        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3282                sizeof(lensAperture), &lensAperture);
3283    }
3284
3285    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3286        float filterDensity =
3287            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3288        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3289                sizeof(filterDensity), &filterDensity);
3290    }
3291
3292    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3293        float focalLength =
3294            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3295        rc = AddSetParmEntryToBatch(mParameters,
3296                CAM_INTF_META_LENS_FOCAL_LENGTH,
3297                sizeof(focalLength), &focalLength);
3298    }
3299
3300    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3301        uint8_t optStabMode =
3302            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3303        rc = AddSetParmEntryToBatch(mParameters,
3304                CAM_INTF_META_LENS_OPT_STAB_MODE,
3305                sizeof(optStabMode), &optStabMode);
3306    }
3307
3308    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3309        uint8_t noiseRedMode =
3310            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3311        rc = AddSetParmEntryToBatch(mParameters,
3312                CAM_INTF_META_NOISE_REDUCTION_MODE,
3313                sizeof(noiseRedMode), &noiseRedMode);
3314    }
3315
3316    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3317        uint8_t noiseRedStrength =
3318            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3319        rc = AddSetParmEntryToBatch(mParameters,
3320                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3321                sizeof(noiseRedStrength), &noiseRedStrength);
3322    }
3323
3324    cam_crop_region_t scalerCropRegion;
3325    bool scalerCropSet = false;
3326    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3327        scalerCropRegion.left =
3328            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3329        scalerCropRegion.top =
3330            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3331        scalerCropRegion.width =
3332            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3333        scalerCropRegion.height =
3334            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3335        rc = AddSetParmEntryToBatch(mParameters,
3336                CAM_INTF_META_SCALER_CROP_REGION,
3337                sizeof(scalerCropRegion), &scalerCropRegion);
3338        scalerCropSet = true;
3339    }
3340
3341    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3342        int64_t sensorExpTime =
3343            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3344        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3345        rc = AddSetParmEntryToBatch(mParameters,
3346                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3347                sizeof(sensorExpTime), &sensorExpTime);
3348    }
3349
3350    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3351        int64_t sensorFrameDuration =
3352            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3353        int64_t minFrameDuration = getMinFrameDuration(request);
3354        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3355        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3356            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3357        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3358        rc = AddSetParmEntryToBatch(mParameters,
3359                CAM_INTF_META_SENSOR_FRAME_DURATION,
3360                sizeof(sensorFrameDuration), &sensorFrameDuration);
3361    }
3362
3363    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3364        int32_t sensorSensitivity =
3365            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3366        if (sensorSensitivity <
3367                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3368            sensorSensitivity =
3369                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3370        if (sensorSensitivity >
3371                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3372            sensorSensitivity =
3373                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3374        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3375        rc = AddSetParmEntryToBatch(mParameters,
3376                CAM_INTF_META_SENSOR_SENSITIVITY,
3377                sizeof(sensorSensitivity), &sensorSensitivity);
3378    }
3379
3380    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3381        int32_t shadingMode =
3382            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3383        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3384                sizeof(shadingMode), &shadingMode);
3385    }
3386
3387    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3388        uint8_t shadingStrength =
3389            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3390        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3391                sizeof(shadingStrength), &shadingStrength);
3392    }
3393
3394    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3395        uint8_t fwk_facedetectMode =
3396            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3397        uint8_t facedetectMode =
3398            lookupHalName(FACEDETECT_MODES_MAP,
3399                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3400        rc = AddSetParmEntryToBatch(mParameters,
3401                CAM_INTF_META_STATS_FACEDETECT_MODE,
3402                sizeof(facedetectMode), &facedetectMode);
3403    }
3404
3405    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3406        uint8_t histogramMode =
3407            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3408        rc = AddSetParmEntryToBatch(mParameters,
3409                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3410                sizeof(histogramMode), &histogramMode);
3411    }
3412
3413    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3414        uint8_t sharpnessMapMode =
3415            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3416        rc = AddSetParmEntryToBatch(mParameters,
3417                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3418                sizeof(sharpnessMapMode), &sharpnessMapMode);
3419    }
3420
3421    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3422        uint8_t tonemapMode =
3423            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3424        rc = AddSetParmEntryToBatch(mParameters,
3425                CAM_INTF_META_TONEMAP_MODE,
3426                sizeof(tonemapMode), &tonemapMode);
3427    }
3428    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3429    /*All tonemap channels will have the same number of points*/
3430    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3431        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3432        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3433        cam_rgb_tonemap_curves tonemapCurves;
3434        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3435
3436        /* ch0 = G*/
3437        int point = 0;
3438        cam_tonemap_curve_t tonemapCurveGreen;
3439        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3440            for (int j = 0; j < 2; j++) {
3441               tonemapCurveGreen.tonemap_points[i][j] =
3442                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3443               point++;
3444            }
3445        }
3446        tonemapCurves.curves[0] = tonemapCurveGreen;
3447
3448        /* ch 1 = B */
3449        point = 0;
3450        cam_tonemap_curve_t tonemapCurveBlue;
3451        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3452            for (int j = 0; j < 2; j++) {
3453               tonemapCurveBlue.tonemap_points[i][j] =
3454                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3455               point++;
3456            }
3457        }
3458        tonemapCurves.curves[1] = tonemapCurveBlue;
3459
3460        /* ch 2 = R */
3461        point = 0;
3462        cam_tonemap_curve_t tonemapCurveRed;
3463        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3464            for (int j = 0; j < 2; j++) {
3465               tonemapCurveRed.tonemap_points[i][j] =
3466                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3467               point++;
3468            }
3469        }
3470        tonemapCurves.curves[2] = tonemapCurveRed;
3471
3472        rc = AddSetParmEntryToBatch(mParameters,
3473                CAM_INTF_META_TONEMAP_CURVES,
3474                sizeof(tonemapCurves), &tonemapCurves);
3475    }
3476
3477    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3478        uint8_t captureIntent =
3479            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3480        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3481                sizeof(captureIntent), &captureIntent);
3482    }
3483
3484    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3485        uint8_t blackLevelLock =
3486            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3487        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3488                sizeof(blackLevelLock), &blackLevelLock);
3489    }
3490
3491    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3492        uint8_t lensShadingMapMode =
3493            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3494        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3495                sizeof(lensShadingMapMode), &lensShadingMapMode);
3496    }
3497
3498    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3499        cam_area_t roi;
3500        bool reset = true;
3501        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3502        if (scalerCropSet) {
3503            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3504        }
3505        if (reset) {
3506            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3507                    sizeof(roi), &roi);
3508        }
3509    }
3510
3511    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3512        cam_area_t roi;
3513        bool reset = true;
3514        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3515        if (scalerCropSet) {
3516            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3517        }
3518        if (reset) {
3519            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3520                    sizeof(roi), &roi);
3521        }
3522    }
3523
3524    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3525        cam_area_t roi;
3526        bool reset = true;
3527        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3528        if (scalerCropSet) {
3529            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3530        }
3531        if (reset) {
3532            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3533                    sizeof(roi), &roi);
3534        }
3535    }
3536    return rc;
3537}
3538
3539/*===========================================================================
3540 * FUNCTION   : getJpegSettings
3541 *
3542 * DESCRIPTION: save the jpeg settings in the HAL
3543 *
3544 *
3545 * PARAMETERS :
3546 *   @settings  : frame settings information from framework
3547 *
3548 *
3549 * RETURN     : success: NO_ERROR
3550 *              failure:
3551 *==========================================================================*/
3552int QCamera3HardwareInterface::getJpegSettings
3553                                  (const camera_metadata_t *settings)
3554{
3555    if (mJpegSettings) {
3556        if (mJpegSettings->gps_timestamp) {
3557            free(mJpegSettings->gps_timestamp);
3558            mJpegSettings->gps_timestamp = NULL;
3559        }
3560        if (mJpegSettings->gps_coordinates) {
3561            for (int i = 0; i < 3; i++) {
3562                free(mJpegSettings->gps_coordinates[i]);
3563                mJpegSettings->gps_coordinates[i] = NULL;
3564            }
3565        }
3566        free(mJpegSettings);
3567        mJpegSettings = NULL;
3568    }
3569    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3570    CameraMetadata jpeg_settings;
3571    jpeg_settings = settings;
3572
3573    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3574        mJpegSettings->jpeg_orientation =
3575            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3576    } else {
3577        mJpegSettings->jpeg_orientation = 0;
3578    }
3579    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3580        mJpegSettings->jpeg_quality =
3581            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3582    } else {
3583        mJpegSettings->jpeg_quality = 85;
3584    }
3585    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3586        mJpegSettings->thumbnail_size.width =
3587            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3588        mJpegSettings->thumbnail_size.height =
3589            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3590    } else {
3591        mJpegSettings->thumbnail_size.width = 0;
3592        mJpegSettings->thumbnail_size.height = 0;
3593    }
3594    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3595        for (int i = 0; i < 3; i++) {
3596            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3597            *(mJpegSettings->gps_coordinates[i]) =
3598                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3599        }
3600    } else{
3601       for (int i = 0; i < 3; i++) {
3602            mJpegSettings->gps_coordinates[i] = NULL;
3603        }
3604    }
3605
3606    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3607        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3608        *(mJpegSettings->gps_timestamp) =
3609            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3610    } else {
3611        mJpegSettings->gps_timestamp = NULL;
3612    }
3613
3614    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3615        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3616        for (int i = 0; i < len; i++) {
3617            mJpegSettings->gps_processing_method[i] =
3618                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3619        }
3620        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3621            mJpegSettings->gps_processing_method[len] = '\0';
3622        }
3623    } else {
3624        mJpegSettings->gps_processing_method[0] = '\0';
3625    }
3626
3627    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3628        mJpegSettings->sensor_sensitivity =
3629            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3630    } else {
3631        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3632    }
3633
3634    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3635
3636    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3637        mJpegSettings->lens_focal_length =
3638            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3639    }
3640    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3641        mJpegSettings->exposure_compensation =
3642            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3643    }
3644    mJpegSettings->sharpness = 10; //default value
3645    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3646        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3647        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3648            mJpegSettings->sharpness = 0;
3649        }
3650    }
3651    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3652    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3653    mJpegSettings->is_jpeg_format = true;
3654    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3655    return 0;
3656}
3657
3658/*===========================================================================
3659 * FUNCTION   : captureResultCb
3660 *
3661 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3662 *
3663 * PARAMETERS :
3664 *   @frame  : frame information from mm-camera-interface
3665 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3666 *   @userdata: userdata
3667 *
3668 * RETURN     : NONE
3669 *==========================================================================*/
3670void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3671                camera3_stream_buffer_t *buffer,
3672                uint32_t frame_number, void *userdata)
3673{
3674    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3675    if (hw == NULL) {
3676        ALOGE("%s: Invalid hw %p", __func__, hw);
3677        return;
3678    }
3679
3680    hw->captureResultCb(metadata, buffer, frame_number);
3681    return;
3682}
3683
3684
3685/*===========================================================================
3686 * FUNCTION   : initialize
3687 *
3688 * DESCRIPTION: Pass framework callback pointers to HAL
3689 *
3690 * PARAMETERS :
3691 *
3692 *
3693 * RETURN     : Success : 0
3694 *              Failure: -ENODEV
3695 *==========================================================================*/
3696
3697int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3698                                  const camera3_callback_ops_t *callback_ops)
3699{
3700    ALOGV("%s: E", __func__);
3701    QCamera3HardwareInterface *hw =
3702        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3703    if (!hw) {
3704        ALOGE("%s: NULL camera device", __func__);
3705        return -ENODEV;
3706    }
3707
3708    int rc = hw->initialize(callback_ops);
3709    ALOGV("%s: X", __func__);
3710    return rc;
3711}
3712
3713/*===========================================================================
3714 * FUNCTION   : configure_streams
3715 *
3716 * DESCRIPTION:
3717 *
3718 * PARAMETERS :
3719 *
3720 *
3721 * RETURN     : Success: 0
3722 *              Failure: -EINVAL (if stream configuration is invalid)
3723 *                       -ENODEV (fatal error)
3724 *==========================================================================*/
3725
3726int QCamera3HardwareInterface::configure_streams(
3727        const struct camera3_device *device,
3728        camera3_stream_configuration_t *stream_list)
3729{
3730    ALOGV("%s: E", __func__);
3731    QCamera3HardwareInterface *hw =
3732        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3733    if (!hw) {
3734        ALOGE("%s: NULL camera device", __func__);
3735        return -ENODEV;
3736    }
3737    int rc = hw->configureStreams(stream_list);
3738    ALOGV("%s: X", __func__);
3739    return rc;
3740}
3741
3742/*===========================================================================
3743 * FUNCTION   : register_stream_buffers
3744 *
3745 * DESCRIPTION: Register stream buffers with the device
3746 *
3747 * PARAMETERS :
3748 *
3749 * RETURN     :
3750 *==========================================================================*/
3751int QCamera3HardwareInterface::register_stream_buffers(
3752        const struct camera3_device *device,
3753        const camera3_stream_buffer_set_t *buffer_set)
3754{
3755    ALOGV("%s: E", __func__);
3756    QCamera3HardwareInterface *hw =
3757        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3758    if (!hw) {
3759        ALOGE("%s: NULL camera device", __func__);
3760        return -ENODEV;
3761    }
3762    int rc = hw->registerStreamBuffers(buffer_set);
3763    ALOGV("%s: X", __func__);
3764    return rc;
3765}
3766
3767/*===========================================================================
3768 * FUNCTION   : construct_default_request_settings
3769 *
3770 * DESCRIPTION: Configure a settings buffer to meet the required use case
3771 *
3772 * PARAMETERS :
3773 *
3774 *
3775 * RETURN     : Success: Return valid metadata
3776 *              Failure: Return NULL
3777 *==========================================================================*/
3778const camera_metadata_t* QCamera3HardwareInterface::
3779    construct_default_request_settings(const struct camera3_device *device,
3780                                        int type)
3781{
3782
3783    ALOGV("%s: E", __func__);
3784    camera_metadata_t* fwk_metadata = NULL;
3785    QCamera3HardwareInterface *hw =
3786        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3787    if (!hw) {
3788        ALOGE("%s: NULL camera device", __func__);
3789        return NULL;
3790    }
3791
3792    fwk_metadata = hw->translateCapabilityToMetadata(type);
3793
3794    ALOGV("%s: X", __func__);
3795    return fwk_metadata;
3796}
3797
3798/*===========================================================================
3799 * FUNCTION   : process_capture_request
3800 *
3801 * DESCRIPTION:
3802 *
3803 * PARAMETERS :
3804 *
3805 *
3806 * RETURN     :
3807 *==========================================================================*/
3808int QCamera3HardwareInterface::process_capture_request(
3809                    const struct camera3_device *device,
3810                    camera3_capture_request_t *request)
3811{
3812    ALOGV("%s: E", __func__);
3813    QCamera3HardwareInterface *hw =
3814        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3815    if (!hw) {
3816        ALOGE("%s: NULL camera device", __func__);
3817        return -EINVAL;
3818    }
3819
3820    int rc = hw->processCaptureRequest(request);
3821    ALOGV("%s: X", __func__);
3822    return rc;
3823}
3824
3825/*===========================================================================
3826 * FUNCTION   : get_metadata_vendor_tag_ops
3827 *
3828 * DESCRIPTION:
3829 *
3830 * PARAMETERS :
3831 *
3832 *
3833 * RETURN     :
3834 *==========================================================================*/
3835
3836void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3837                const struct camera3_device *device,
3838                vendor_tag_query_ops_t* ops)
3839{
3840    ALOGV("%s: E", __func__);
3841    QCamera3HardwareInterface *hw =
3842        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3843    if (!hw) {
3844        ALOGE("%s: NULL camera device", __func__);
3845        return;
3846    }
3847
3848    hw->getMetadataVendorTagOps(ops);
3849    ALOGV("%s: X", __func__);
3850    return;
3851}
3852
3853/*===========================================================================
3854 * FUNCTION   : dump
3855 *
3856 * DESCRIPTION:
3857 *
3858 * PARAMETERS :
3859 *
3860 *
3861 * RETURN     :
3862 *==========================================================================*/
3863
3864void QCamera3HardwareInterface::dump(
3865                const struct camera3_device *device, int fd)
3866{
3867    ALOGV("%s: E", __func__);
3868    QCamera3HardwareInterface *hw =
3869        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3870    if (!hw) {
3871        ALOGE("%s: NULL camera device", __func__);
3872        return;
3873    }
3874
3875    hw->dump(fd);
3876    ALOGV("%s: X", __func__);
3877    return;
3878}
3879
3880/*===========================================================================
3881 * FUNCTION   : flush
3882 *
3883 * DESCRIPTION:
3884 *
3885 * PARAMETERS :
3886 *
3887 *
3888 * RETURN     :
3889 *==========================================================================*/
3890
3891int QCamera3HardwareInterface::flush(
3892                const struct camera3_device *device)
3893{
3894    int rc;
3895    ALOGV("%s: E", __func__);
3896    QCamera3HardwareInterface *hw =
3897        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3898    if (!hw) {
3899        ALOGE("%s: NULL camera device", __func__);
3900        return -EINVAL;
3901    }
3902
3903    rc = hw->flush();
3904    ALOGV("%s: X", __func__);
3905    return rc;
3906}
3907
3908/*===========================================================================
3909 * FUNCTION   : close_camera_device
3910 *
3911 * DESCRIPTION:
3912 *
3913 * PARAMETERS :
3914 *
3915 *
3916 * RETURN     :
3917 *==========================================================================*/
3918int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3919{
3920    ALOGV("%s: E", __func__);
3921    int ret = NO_ERROR;
3922    QCamera3HardwareInterface *hw =
3923        reinterpret_cast<QCamera3HardwareInterface *>(
3924            reinterpret_cast<camera3_device_t *>(device)->priv);
3925    if (!hw) {
3926        ALOGE("NULL camera device");
3927        return BAD_VALUE;
3928    }
3929    delete hw;
3930
3931    pthread_mutex_lock(&mCameraSessionLock);
3932    mCameraSessionActive = 0;
3933    pthread_mutex_unlock(&mCameraSessionLock);
3934    ALOGV("%s: X", __func__);
3935    return ret;
3936}
3937
3938/*===========================================================================
3939 * FUNCTION   : getWaveletDenoiseProcessPlate
3940 *
3941 * DESCRIPTION: query wavelet denoise process plate
3942 *
3943 * PARAMETERS : None
3944 *
3945 * RETURN     : WNR prcocess plate vlaue
3946 *==========================================================================*/
3947cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3948{
3949    char prop[PROPERTY_VALUE_MAX];
3950    memset(prop, 0, sizeof(prop));
3951    property_get("persist.denoise.process.plates", prop, "0");
3952    int processPlate = atoi(prop);
3953    switch(processPlate) {
3954    case 0:
3955        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3956    case 1:
3957        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3958    case 2:
3959        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3960    case 3:
3961        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3962    default:
3963        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3964    }
3965}
3966
3967/*===========================================================================
3968 * FUNCTION   : needRotationReprocess
3969 *
3970 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3971 *
3972 * PARAMETERS : none
3973 *
3974 * RETURN     : true: needed
3975 *              false: no need
3976 *==========================================================================*/
3977bool QCamera3HardwareInterface::needRotationReprocess()
3978{
3979
3980    if (!mJpegSettings->is_jpeg_format) {
3981        // RAW image, no need to reprocess
3982        return false;
3983    }
3984
3985    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3986        mJpegSettings->jpeg_orientation > 0) {
3987        // current rotation is not zero, and pp has the capability to process rotation
3988        ALOGD("%s: need do reprocess for rotation", __func__);
3989        return true;
3990    }
3991
3992    return false;
3993}
3994
3995/*===========================================================================
3996 * FUNCTION   : needReprocess
3997 *
3998 * DESCRIPTION: if reprocess in needed
3999 *
4000 * PARAMETERS : none
4001 *
4002 * RETURN     : true: needed
4003 *              false: no need
4004 *==========================================================================*/
4005bool QCamera3HardwareInterface::needReprocess()
4006{
4007    if (!mJpegSettings->is_jpeg_format) {
4008        // RAW image, no need to reprocess
4009        return false;
4010    }
4011
4012    if ((mJpegSettings->min_required_pp_mask > 0) ||
4013         isWNREnabled()) {
4014        // TODO: add for ZSL HDR later
4015        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4016        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4017        return true;
4018    }
4019    return needRotationReprocess();
4020}
4021
4022/*===========================================================================
4023 * FUNCTION   : addOnlineReprocChannel
4024 *
4025 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4026 *              coming from input channel
4027 *
4028 * PARAMETERS :
4029 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4030 *
4031 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4032 *==========================================================================*/
4033QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4034              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4035{
4036    int32_t rc = NO_ERROR;
4037    QCamera3ReprocessChannel *pChannel = NULL;
4038    if (pInputChannel == NULL) {
4039        ALOGE("%s: input channel obj is NULL", __func__);
4040        return NULL;
4041    }
4042
4043    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4044            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4045    if (NULL == pChannel) {
4046        ALOGE("%s: no mem for reprocess channel", __func__);
4047        return NULL;
4048    }
4049
4050    // Capture channel, only need snapshot and postview streams start together
4051    mm_camera_channel_attr_t attr;
4052    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4053    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4054    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4055    rc = pChannel->initialize();
4056    if (rc != NO_ERROR) {
4057        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4058        delete pChannel;
4059        return NULL;
4060    }
4061
4062    // pp feature config
4063    cam_pp_feature_config_t pp_config;
4064    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4065    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4066        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4067        pp_config.sharpness = mJpegSettings->sharpness;
4068    }
4069
4070    if (isWNREnabled()) {
4071        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4072        pp_config.denoise2d.denoise_enable = 1;
4073        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4074    }
4075    if (needRotationReprocess()) {
4076        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4077        int rotation = mJpegSettings->jpeg_orientation;
4078        if (rotation == 0) {
4079            pp_config.rotation = ROTATE_0;
4080        } else if (rotation == 90) {
4081            pp_config.rotation = ROTATE_90;
4082        } else if (rotation == 180) {
4083            pp_config.rotation = ROTATE_180;
4084        } else if (rotation == 270) {
4085            pp_config.rotation = ROTATE_270;
4086        }
4087    }
4088
4089   rc = pChannel->addReprocStreamsFromSource(pp_config,
4090                                             pInputChannel,
4091                                             mMetadataChannel);
4092
4093    if (rc != NO_ERROR) {
4094        delete pChannel;
4095        return NULL;
4096    }
4097    return pChannel;
4098}
4099
/*===========================================================================
 * FUNCTION   : getMaxUnmatchedFramesInQueue
 *
 * DESCRIPTION: number of unmatched frames a channel may keep queued, taken
 *              from the capability table's min_num_pp_bufs for this camera
 *
 * PARAMETERS : none
 *
 * RETURN     : max unmatched frame count
 *==========================================================================*/
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4104
/*===========================================================================
 * FUNCTION   : isWNREnabled
 *
 * DESCRIPTION: whether wavelet noise reduction is available, per the
 *              capability table's isWnrSupported flag for this camera
 *
 * PARAMETERS : none
 *
 * RETURN     : true if WNR is supported, false otherwise
 *==========================================================================*/
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4108
4109}; //end namespace qcamera
4110