QCamera3HWI.cpp revision 73b2b0e1b5b94d44191188070f3776a984142330
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
// Simple max helper used for sizing calculations in this file.
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand to fetch the mapped data pointer of a memory object at INDEX.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables, filled once at camera-module init and
// shared by all HAL instances (indexed by camera ID).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
// Cached static metadata per sensor, handed to the framework on request.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: only one camera session may be open at a time.
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Translation tables between Android camera3 metadata enum values
// (ANDROID_*) and the mm-camera backend enums (CAM_*).

// Color effect modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Auto-white-balance modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Scene modes. Note STEADYPHOTO intentionally maps to the backend's
// ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Autofocus modes. AF_MODE_OFF maps to the backend's FIXED focus mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// AE antibanding (flicker compensation) modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> backend flash mode. Both OFF and ON (no flash) map to
// CAM_FLASH_MODE_OFF; redeye mode is treated as AUTO by the backend.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Manual flash control modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Face-detect modes (SIMPLE mode has no entry; only OFF/FULL are mapped).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the
// trailing (0, 0) entry advertises "no thumbnail".
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
// camera3 HAL device-ops vtable handed to the framework; each entry points
// at the static trampoline that forwards into the class instance.
// (Uses GCC's "label:" designated-initializer syntax.)
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the framework-visible device struct: version, close hook,
    // ops vtable, and a back-pointer so static trampolines can find us.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): assumes gCamCapability[cameraId] was populated before
    // this constructor runs (e.g. by capability init at module load) --
    // a NULL entry here would crash. Confirm against caller.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL is optional; a missing module is logged but not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */
        /*flush the metadata list*/
    if (!mStoredMetadataList.empty()) {
        // Return stashed metadata buffers to the channel and free them.
        // NOTE(review): erase() already returns the next iterator, so the
        // extra m++ in the for-loop skips every other entry and can step
        // past end() -- confirm and fix separately.
        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
              m != mStoredMetadataList.end(); m++) {
            mMetadataChannel->bufDone(m->meta_buf);
            free(m->meta_buf);
            m = mStoredMetadataList.erase(m);
        }
    }
    // First pass: stop every stream channel before any is deleted.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
           channel->stop();
    }
    // Second pass: delete the channels and free the bookkeeping structs.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
            delete channel;
        free (*it);
    }

    // mPictureChannel was owned via mStreamInfo above; just clear the alias.
    mPictureChannel = NULL;

    if (mJpegSettings != NULL) {
        free(mJpegSettings);
        mJpegSettings = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        // Metadata channel only exists after initialize()/configureStreams.
        mMetadataChannel->stop();
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    // Release any default request templates that were built.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}
271
272/*===========================================================================
273 * FUNCTION   : openCamera
274 *
275 * DESCRIPTION: open camera
276 *
277 * PARAMETERS :
278 *   @hw_device  : double ptr for camera device struct
279 *
280 * RETURN     : int32_t type of status
281 *              NO_ERROR  -- success
282 *              none-zero failure code
283 *==========================================================================*/
284int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
285{
286    int rc = 0;
287    pthread_mutex_lock(&mCameraSessionLock);
288    if (mCameraSessionActive) {
289        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
290        pthread_mutex_unlock(&mCameraSessionLock);
291        return INVALID_OPERATION;
292    }
293
294    if (mCameraOpened) {
295        *hw_device = NULL;
296        return PERMISSION_DENIED;
297    }
298
299    rc = openCamera();
300    if (rc == 0) {
301        *hw_device = &mCameraDevice.common;
302        mCameraSessionActive = 1;
303    } else
304        *hw_device = NULL;
305
306#ifdef HAS_MULTIMEDIA_HINTS
307    if (rc == 0) {
308        if (m_pPowerModule) {
309            if (m_pPowerModule->powerHint) {
310                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
311                        (void *)"state=1");
312            }
313        }
314    }
315#endif
316    pthread_mutex_unlock(&mCameraSessionLock);
317    return rc;
318}
319
320/*===========================================================================
321 * FUNCTION   : openCamera
322 *
323 * DESCRIPTION: open camera
324 *
325 * PARAMETERS : none
326 *
327 * RETURN     : int32_t type of status
328 *              NO_ERROR  -- success
329 *              none-zero failure code
330 *==========================================================================*/
331int QCamera3HardwareInterface::openCamera()
332{
333    if (mCameraHandle) {
334        ALOGE("Failure: Camera already opened");
335        return ALREADY_EXISTS;
336    }
337    mCameraHandle = camera_open(mCameraId);
338    if (!mCameraHandle) {
339        ALOGE("camera_open failed.");
340        return UNKNOWN_ERROR;
341    }
342
343    mCameraOpened = true;
344
345    return NO_ERROR;
346}
347
348/*===========================================================================
349 * FUNCTION   : closeCamera
350 *
351 * DESCRIPTION: close camera
352 *
353 * PARAMETERS : none
354 *
355 * RETURN     : int32_t type of status
356 *              NO_ERROR  -- success
357 *              none-zero failure code
358 *==========================================================================*/
359int QCamera3HardwareInterface::closeCamera()
360{
361    int rc = NO_ERROR;
362
363    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
364    mCameraHandle = NULL;
365    mCameraOpened = false;
366
367#ifdef HAS_MULTIMEDIA_HINTS
368    if (rc == NO_ERROR) {
369        if (m_pPowerModule) {
370            if (m_pPowerModule->powerHint) {
371                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
372                        (void *)"state=0");
373            }
374        }
375    }
376#endif
377
378    return rc;
379}
380
381/*===========================================================================
382 * FUNCTION   : initialize
383 *
384 * DESCRIPTION: Initialize frameworks callback functions
385 *
386 * PARAMETERS :
387 *   @callback_ops : callback function to frameworks
388 *
389 * RETURN     :
390 *
391 *==========================================================================*/
392int QCamera3HardwareInterface::initialize(
393        const struct camera3_callback_ops *callback_ops)
394{
395    int rc;
396
397    pthread_mutex_lock(&mMutex);
398
399    rc = initParameters();
400    if (rc < 0) {
401        ALOGE("%s: initParamters failed %d", __func__, rc);
402       goto err1;
403    }
404    mCallbackOps = callback_ops;
405
406    pthread_mutex_unlock(&mMutex);
407    mCameraInitialized = true;
408    return 0;
409
410err1:
411    pthread_mutex_unlock(&mMutex);
412    return rc;
413}
414
415/*===========================================================================
416 * FUNCTION   : configureStreams
417 *
418 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
419 *              and output streams.
420 *
421 * PARAMETERS :
422 *   @stream_list : streams to be configured
423 *
424 * RETURN     :
425 *
426 *==========================================================================*/
427int QCamera3HardwareInterface::configureStreams(
428        camera3_stream_configuration_t *streamList)
429{
430    int rc = 0;
431    mIsZslMode = false;
432
433    // Sanity check stream_list
434    if (streamList == NULL) {
435        ALOGE("%s: NULL stream configuration", __func__);
436        return BAD_VALUE;
437    }
438    if (streamList->streams == NULL) {
439        ALOGE("%s: NULL stream list", __func__);
440        return BAD_VALUE;
441    }
442
443    if (streamList->num_streams < 1) {
444        ALOGE("%s: Bad number of streams requested: %d", __func__,
445                streamList->num_streams);
446        return BAD_VALUE;
447    }
448
449    /* first invalidate all the steams in the mStreamList
450     * if they appear again, they will be validated */
451    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
452            it != mStreamInfo.end(); it++) {
453        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
454        channel->stop();
455        (*it)->status = INVALID;
456    }
457    if (mMetadataChannel) {
458        /* If content of mStreamInfo is not 0, there is metadata stream */
459        mMetadataChannel->stop();
460    }
461
462    pthread_mutex_lock(&mMutex);
463
464    camera3_stream_t *inputStream = NULL;
465    camera3_stream_t *jpegStream = NULL;
466    cam_stream_size_info_t stream_config_info;
467
468    for (size_t i = 0; i < streamList->num_streams; i++) {
469        camera3_stream_t *newStream = streamList->streams[i];
470        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
471                __func__, newStream->stream_type, newStream->format,
472                 newStream->width, newStream->height);
473        //if the stream is in the mStreamList validate it
474        bool stream_exists = false;
475        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
476                it != mStreamInfo.end(); it++) {
477            if ((*it)->stream == newStream) {
478                QCamera3Channel *channel =
479                    (QCamera3Channel*)(*it)->stream->priv;
480                stream_exists = true;
481                (*it)->status = RECONFIGURE;
482                /*delete the channel object associated with the stream because
483                  we need to reconfigure*/
484                delete channel;
485                (*it)->stream->priv = NULL;
486            }
487        }
488        if (!stream_exists) {
489            //new stream
490            stream_info_t* stream_info;
491            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
492            stream_info->stream = newStream;
493            stream_info->status = VALID;
494            stream_info->registered = 0;
495            mStreamInfo.push_back(stream_info);
496        }
497        if (newStream->stream_type == CAMERA3_STREAM_INPUT
498                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
499            if (inputStream != NULL) {
500                ALOGE("%s: Multiple input streams requested!", __func__);
501                pthread_mutex_unlock(&mMutex);
502                return BAD_VALUE;
503            }
504            inputStream = newStream;
505        }
506        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
507            jpegStream = newStream;
508        }
509    }
510    mInputStream = inputStream;
511
512    /*clean up invalid streams*/
513    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
514            it != mStreamInfo.end();) {
515        if(((*it)->status) == INVALID){
516            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
517            delete channel;
518            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
519            free(*it);
520            it = mStreamInfo.erase(it);
521        } else {
522            it++;
523        }
524    }
525    if (mMetadataChannel) {
526        delete mMetadataChannel;
527        mMetadataChannel = NULL;
528    }
529
530    //Create metadata channel and initialize it
531    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
532                    mCameraHandle->ops, captureResultCb,
533                    &gCamCapability[mCameraId]->padding_info, this);
534    if (mMetadataChannel == NULL) {
535        ALOGE("%s: failed to allocate metadata channel", __func__);
536        rc = -ENOMEM;
537        pthread_mutex_unlock(&mMutex);
538        return rc;
539    }
540    rc = mMetadataChannel->initialize();
541    if (rc < 0) {
542        ALOGE("%s: metadata channel initialization failed", __func__);
543        delete mMetadataChannel;
544        pthread_mutex_unlock(&mMutex);
545        return rc;
546    }
547
548    /* Allocate channel objects for the requested streams */
549    for (size_t i = 0; i < streamList->num_streams; i++) {
550        camera3_stream_t *newStream = streamList->streams[i];
551        uint32_t stream_usage = newStream->usage;
552        stream_config_info.stream_sizes[i].width = newStream->width;
553        stream_config_info.stream_sizes[i].height = newStream->height;
554        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
555            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
556            //for zsl stream the size is jpeg size
557            stream_config_info.stream_sizes[i].width = jpegStream->width;
558            stream_config_info.stream_sizes[i].height = jpegStream->height;
559            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
560        } else {
561           //for non zsl streams find out the format
562           switch (newStream->format) {
563           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
564              {
565                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
566                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
567                 } else {
568                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
569                 }
570              }
571              break;
572           case HAL_PIXEL_FORMAT_YCbCr_420_888:
573              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
574              break;
575           case HAL_PIXEL_FORMAT_BLOB:
576              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
577              break;
578           default:
579              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
580              break;
581           }
582        }
583        if (newStream->priv == NULL) {
584            //New stream, construct channel
585            switch (newStream->stream_type) {
586            case CAMERA3_STREAM_INPUT:
587                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
588                break;
589            case CAMERA3_STREAM_BIDIRECTIONAL:
590                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
591                    GRALLOC_USAGE_HW_CAMERA_WRITE;
592                break;
593            case CAMERA3_STREAM_OUTPUT:
594                /* For video encoding stream, set read/write rarely
595                 * flag so that they may be set to un-cached */
596                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
597                    newStream->usage =
598                         (GRALLOC_USAGE_SW_READ_RARELY |
599                         GRALLOC_USAGE_SW_WRITE_RARELY |
600                         GRALLOC_USAGE_HW_CAMERA_WRITE);
601                else
602                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
603                break;
604            default:
605                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
606                break;
607            }
608
609            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
610                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
611                QCamera3Channel *channel;
612                switch (newStream->format) {
613                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
614                case HAL_PIXEL_FORMAT_YCbCr_420_888:
615                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
616                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
617                        jpegStream) {
618                        uint32_t width = jpegStream->width;
619                        uint32_t height = jpegStream->height;
620                        mIsZslMode = true;
621                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
622                            mCameraHandle->ops, captureResultCb,
623                            &gCamCapability[mCameraId]->padding_info, this, newStream,
624                            width, height);
625                    } else
626                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
627                            mCameraHandle->ops, captureResultCb,
628                            &gCamCapability[mCameraId]->padding_info, this, newStream);
629                    if (channel == NULL) {
630                        ALOGE("%s: allocation of channel failed", __func__);
631                        pthread_mutex_unlock(&mMutex);
632                        return -ENOMEM;
633                    }
634
635                    newStream->priv = channel;
636                    break;
637                case HAL_PIXEL_FORMAT_BLOB:
638                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
639                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
640                            mCameraHandle->ops, captureResultCb,
641                            &gCamCapability[mCameraId]->padding_info, this, newStream);
642                    if (mPictureChannel == NULL) {
643                        ALOGE("%s: allocation of channel failed", __func__);
644                        pthread_mutex_unlock(&mMutex);
645                        return -ENOMEM;
646                    }
647                    newStream->priv = (QCamera3Channel*)mPictureChannel;
648                    break;
649
650                //TODO: Add support for app consumed format?
651                default:
652                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
653                    break;
654                }
655            }
656        } else {
657            // Channel already exists for this stream
658            // Do nothing for now
659        }
660    }
661    /*For the streams to be reconfigured we need to register the buffers
662      since the framework wont*/
663    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
664            it != mStreamInfo.end(); it++) {
665        if ((*it)->status == RECONFIGURE) {
666            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
667            /*only register buffers for streams that have already been
668              registered*/
669            if ((*it)->registered) {
670                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
671                        (*it)->buffer_set.buffers);
672                if (rc != NO_ERROR) {
673                    ALOGE("%s: Failed to register the buffers of old stream,\
674                            rc = %d", __func__, rc);
675                }
676                ALOGV("%s: channel %p has %d buffers",
677                        __func__, channel, (*it)->buffer_set.num_buffers);
678            }
679        }
680
681        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
682        if (index == NAME_NOT_FOUND) {
683            mPendingBuffersMap.add((*it)->stream, 0);
684        } else {
685            mPendingBuffersMap.editValueAt(index) = 0;
686        }
687    }
688
689    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
690    mPendingRequestsList.clear();
691
692    /*flush the metadata list*/
693    if (!mStoredMetadataList.empty()) {
694        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
695              m != mStoredMetadataList.end(); m++) {
696            mMetadataChannel->bufDone(m->meta_buf);
697            free(m->meta_buf);
698            m = mStoredMetadataList.erase(m);
699        }
700    }
701    int32_t hal_version = CAM_HAL_V3;
702    stream_config_info.num_streams = streamList->num_streams;
703
704    //settings/parameters don't carry over for new configureStreams
705    memset(mParameters, 0, sizeof(parm_buffer_t));
706
707    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
708    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
709                sizeof(hal_version), &hal_version);
710
711    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
712                sizeof(stream_config_info), &stream_config_info);
713
714    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
715
716    mFirstRequest = true;
717
718    //Get min frame duration for this streams configuration
719    deriveMinFrameDuration();
720
721    pthread_mutex_unlock(&mMutex);
722    return rc;
723}
724
725/*===========================================================================
726 * FUNCTION   : validateCaptureRequest
727 *
728 * DESCRIPTION: validate a capture request from camera service
729 *
730 * PARAMETERS :
731 *   @request : request from framework to process
732 *
733 * RETURN     :
734 *
735 *==========================================================================*/
736int QCamera3HardwareInterface::validateCaptureRequest(
737                    camera3_capture_request_t *request)
738{
739    ssize_t idx = 0;
740    const camera3_stream_buffer_t *b;
741    CameraMetadata meta;
742
743    /* Sanity check the request */
744    if (request == NULL) {
745        ALOGE("%s: NULL capture request", __func__);
746        return BAD_VALUE;
747    }
748
749    uint32_t frameNumber = request->frame_number;
750    if (request->input_buffer != NULL &&
751            request->input_buffer->stream != mInputStream) {
752        ALOGE("%s: Request %d: Input buffer not from input stream!",
753                __FUNCTION__, frameNumber);
754        return BAD_VALUE;
755    }
756    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
757        ALOGE("%s: Request %d: No output buffers provided!",
758                __FUNCTION__, frameNumber);
759        return BAD_VALUE;
760    }
761    if (request->input_buffer != NULL) {
762        b = request->input_buffer;
763        QCamera3Channel *channel =
764            static_cast<QCamera3Channel*>(b->stream->priv);
765        if (channel == NULL) {
766            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
767                    __func__, frameNumber, idx);
768            return BAD_VALUE;
769        }
770        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
771            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
772                    __func__, frameNumber, idx);
773            return BAD_VALUE;
774        }
775        if (b->release_fence != -1) {
776            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
777                    __func__, frameNumber, idx);
778            return BAD_VALUE;
779        }
780        if (b->buffer == NULL) {
781            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
782                    __func__, frameNumber, idx);
783            return BAD_VALUE;
784        }
785    }
786
787    // Validate all buffers
788    b = request->output_buffers;
789    do {
790        QCamera3Channel *channel =
791                static_cast<QCamera3Channel*>(b->stream->priv);
792        if (channel == NULL) {
793            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
794                    __func__, frameNumber, idx);
795            return BAD_VALUE;
796        }
797        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
798            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
799                    __func__, frameNumber, idx);
800            return BAD_VALUE;
801        }
802        if (b->release_fence != -1) {
803            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
804                    __func__, frameNumber, idx);
805            return BAD_VALUE;
806        }
807        if (b->buffer == NULL) {
808            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
809                    __func__, frameNumber, idx);
810            return BAD_VALUE;
811        }
812        idx++;
813        b = request->output_buffers + idx;
814    } while (idx < (ssize_t)request->num_output_buffers);
815
816    return NO_ERROR;
817}
818
819/*===========================================================================
820 * FUNCTION   : deriveMinFrameDuration
821 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
 *              on currently configured streams.
824 *
825 * PARAMETERS : NONE
826 *
827 * RETURN     : NONE
828 *
829 *==========================================================================*/
830void QCamera3HardwareInterface::deriveMinFrameDuration()
831{
832    int32_t maxJpegDimension, maxProcessedDimension;
833
834    maxJpegDimension = 0;
835    maxProcessedDimension = 0;
836
837    // Figure out maximum jpeg, processed, and raw dimensions
838    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
839        it != mStreamInfo.end(); it++) {
840
841        // Input stream doesn't have valid stream_type
842        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
843            continue;
844
845        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
846        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
847            if (dimension > maxJpegDimension)
848                maxJpegDimension = dimension;
849        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
850            if (dimension > maxProcessedDimension)
851                maxProcessedDimension = dimension;
852        }
853    }
854
855    //Assume all jpeg dimensions are in processed dimensions.
856    if (maxJpegDimension > maxProcessedDimension)
857        maxProcessedDimension = maxJpegDimension;
858
859    //Find minimum durations for processed, jpeg, and raw
860    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
861    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
862        if (maxProcessedDimension ==
863            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
864            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
865            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
866            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
867            break;
868        }
869    }
870}
871
872/*===========================================================================
873 * FUNCTION   : getMinFrameDuration
874 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the framework
 *
 * RETURN     : min frame duration for a particular request
881 *
882 *==========================================================================*/
883int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
884{
885    bool hasJpegStream = false;
886    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
887        const camera3_stream_t *stream = request->output_buffers[i].stream;
888        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
889            hasJpegStream = true;
890    }
891
892    if (!hasJpegStream)
893        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
894    else
895        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
896}
897
898/*===========================================================================
899 * FUNCTION   : registerStreamBuffers
900 *
901 * DESCRIPTION: Register buffers for a given stream with the HAL device.
902 *
903 * PARAMETERS :
904 *   @stream_list : streams to be configured
905 *
906 * RETURN     :
907 *
908 *==========================================================================*/
909int QCamera3HardwareInterface::registerStreamBuffers(
910        const camera3_stream_buffer_set_t *buffer_set)
911{
912    int rc = 0;
913
914    pthread_mutex_lock(&mMutex);
915
916    if (buffer_set == NULL) {
917        ALOGE("%s: Invalid buffer_set parameter.", __func__);
918        pthread_mutex_unlock(&mMutex);
919        return -EINVAL;
920    }
921    if (buffer_set->stream == NULL) {
922        ALOGE("%s: Invalid stream parameter.", __func__);
923        pthread_mutex_unlock(&mMutex);
924        return -EINVAL;
925    }
926    if (buffer_set->num_buffers < 1) {
927        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
928        pthread_mutex_unlock(&mMutex);
929        return -EINVAL;
930    }
931    if (buffer_set->buffers == NULL) {
932        ALOGE("%s: Invalid buffers parameter.", __func__);
933        pthread_mutex_unlock(&mMutex);
934        return -EINVAL;
935    }
936
937    camera3_stream_t *stream = buffer_set->stream;
938    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
939
940    //set the buffer_set in the mStreamInfo array
941    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
942            it != mStreamInfo.end(); it++) {
943        if ((*it)->stream == stream) {
944            uint32_t numBuffers = buffer_set->num_buffers;
945            (*it)->buffer_set.stream = buffer_set->stream;
946            (*it)->buffer_set.num_buffers = numBuffers;
947            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
948            if ((*it)->buffer_set.buffers == NULL) {
949                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
950                pthread_mutex_unlock(&mMutex);
951                return -ENOMEM;
952            }
953            for (size_t j = 0; j < numBuffers; j++){
954                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
955            }
956            (*it)->registered = 1;
957        }
958    }
959    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
960    if (rc < 0) {
961        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
962        pthread_mutex_unlock(&mMutex);
963        return -ENODEV;
964    }
965
966    pthread_mutex_unlock(&mMutex);
967    return NO_ERROR;
968}
969
970/*===========================================================================
971 * FUNCTION   : processCaptureRequest
972 *
973 * DESCRIPTION: process a capture request from camera service
974 *
975 * PARAMETERS :
976 *   @request : request from framework to process
977 *
978 * RETURN     :
979 *
980 *==========================================================================*/
981int QCamera3HardwareInterface::processCaptureRequest(
982                    camera3_capture_request_t *request)
983{
984    int rc = NO_ERROR;
985    int32_t request_id;
986    CameraMetadata meta;
987    MetadataBufferInfo reproc_meta;
988    int queueMetadata = 0;
989
990    pthread_mutex_lock(&mMutex);
991
992    rc = validateCaptureRequest(request);
993    if (rc != NO_ERROR) {
994        ALOGE("%s: incoming request is not valid", __func__);
995        pthread_mutex_unlock(&mMutex);
996        return rc;
997    }
998
999    meta = request->settings;
1000
1001    // For first capture request, send capture intent, and
1002    // stream on all streams
1003    if (mFirstRequest) {
1004
1005        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1006            int32_t hal_version = CAM_HAL_V3;
1007            uint8_t captureIntent =
1008                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1009
1010            memset(mParameters, 0, sizeof(parm_buffer_t));
1011            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1012            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1013                sizeof(hal_version), &hal_version);
1014            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1015                sizeof(captureIntent), &captureIntent);
1016            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1017                mParameters);
1018        }
1019
1020        mMetadataChannel->start();
1021        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1022            it != mStreamInfo.end(); it++) {
1023            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1024            channel->start();
1025        }
1026    }
1027
1028    uint32_t frameNumber = request->frame_number;
1029    uint32_t streamTypeMask = 0;
1030
1031    if (meta.exists(ANDROID_REQUEST_ID)) {
1032        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1033        mCurrentRequestId = request_id;
1034        ALOGV("%s: Received request with id: %d",__func__, request_id);
1035    } else if (mFirstRequest || mCurrentRequestId == -1){
1036        ALOGE("%s: Unable to find request id field, \
1037                & no previous id available", __func__);
1038        return NAME_NOT_FOUND;
1039    } else {
1040        ALOGV("%s: Re-using old request id", __func__);
1041        request_id = mCurrentRequestId;
1042    }
1043
1044    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1045                                    __func__, __LINE__,
1046                                    request->num_output_buffers,
1047                                    request->input_buffer,
1048                                    frameNumber);
1049    // Acquire all request buffers first
1050    int blob_request = 0;
1051    for (size_t i = 0; i < request->num_output_buffers; i++) {
1052        const camera3_stream_buffer_t& output = request->output_buffers[i];
1053        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1054        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1055
1056        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1057        //Call function to store local copy of jpeg data for encode params.
1058            blob_request = 1;
1059            rc = getJpegSettings(request->settings);
1060            if (rc < 0) {
1061                ALOGE("%s: failed to get jpeg parameters", __func__);
1062                pthread_mutex_unlock(&mMutex);
1063                return rc;
1064            }
1065        }
1066
1067        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1068        if (rc != OK) {
1069            ALOGE("%s: fence wait failed %d", __func__, rc);
1070            pthread_mutex_unlock(&mMutex);
1071            return rc;
1072        }
1073        streamTypeMask |= channel->getStreamTypeMask();
1074    }
1075
1076    rc = setFrameParameters(request, streamTypeMask);
1077    if (rc < 0) {
1078        ALOGE("%s: fail to set frame parameters", __func__);
1079        pthread_mutex_unlock(&mMutex);
1080        return rc;
1081    }
1082
1083    /* Update pending request list and pending buffers map */
1084    PendingRequestInfo pendingRequest;
1085    pendingRequest.frame_number = frameNumber;
1086    pendingRequest.num_buffers = request->num_output_buffers;
1087    pendingRequest.request_id = request_id;
1088    pendingRequest.blob_request = blob_request;
1089    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1090
1091    for (size_t i = 0; i < request->num_output_buffers; i++) {
1092        RequestedBufferInfo requestedBuf;
1093        requestedBuf.stream = request->output_buffers[i].stream;
1094        requestedBuf.buffer = NULL;
1095        pendingRequest.buffers.push_back(requestedBuf);
1096
1097        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1098    }
1099    mPendingRequestsList.push_back(pendingRequest);
1100
1101    // Notify metadata channel we receive a request
1102    mMetadataChannel->request(NULL, frameNumber);
1103
1104    // Call request on other streams
1105    for (size_t i = 0; i < request->num_output_buffers; i++) {
1106        const camera3_stream_buffer_t& output = request->output_buffers[i];
1107        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1108        mm_camera_buf_def_t *pInputBuffer = NULL;
1109
1110        if (channel == NULL) {
1111            ALOGE("%s: invalid channel pointer for stream", __func__);
1112            continue;
1113        }
1114
1115        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1116            QCamera3RegularChannel* inputChannel = NULL;
1117            if(request->input_buffer != NULL){
1118                //Try to get the internal format
1119                inputChannel = (QCamera3RegularChannel*)
1120                    request->input_buffer->stream->priv;
1121                if(inputChannel == NULL ){
1122                    ALOGE("%s: failed to get input channel handle", __func__);
1123                } else {
1124                    pInputBuffer =
1125                        inputChannel->getInternalFormatBuffer(
1126                                request->input_buffer->buffer);
1127                    ALOGD("%s: Input buffer dump",__func__);
1128                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1129                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1130                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1131                    ALOGD("Handle:%p", request->input_buffer->buffer);
1132                    //TODO: need to get corresponding metadata and send it to pproc
1133                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1134                         m != mStoredMetadataList.end(); m++) {
1135                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1136                            reproc_meta.meta_buf = m->meta_buf;
1137                            queueMetadata = 1;
1138                            break;
1139                        }
1140                    }
1141                }
1142            }
1143            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1144                            pInputBuffer,(QCamera3Channel*)inputChannel);
1145            if (queueMetadata) {
1146                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1147            }
1148        } else {
1149            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1150                __LINE__, output.buffer, frameNumber);
1151            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1152                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1153                     m != mStoredMetadataList.end(); m++) {
1154                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1155                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1156                            mMetadataChannel->bufDone(m->meta_buf);
1157                            free(m->meta_buf);
1158                            m = mStoredMetadataList.erase(m);
1159                            break;
1160                        }
1161                   }
1162                }
1163            }
1164            rc = channel->request(output.buffer, frameNumber);
1165        }
1166        if (rc < 0)
1167            ALOGE("%s: request failed", __func__);
1168    }
1169
1170    mFirstRequest = false;
1171
1172    //Block on conditional variable
1173    mPendingRequest = 1;
1174    while (mPendingRequest == 1) {
1175        pthread_cond_wait(&mRequestCond, &mMutex);
1176    }
1177
1178    pthread_mutex_unlock(&mMutex);
1179    return rc;
1180}
1181
1182/*===========================================================================
1183 * FUNCTION   : getMetadataVendorTagOps
1184 *
1185 * DESCRIPTION:
1186 *
1187 * PARAMETERS :
1188 *
1189 *
1190 * RETURN     :
1191 *==========================================================================*/
1192void QCamera3HardwareInterface::getMetadataVendorTagOps(
1193                    vendor_tag_query_ops_t* /*ops*/)
1194{
1195    /* Enable locks when we eventually add Vendor Tags */
1196    /*
1197    pthread_mutex_lock(&mMutex);
1198
1199    pthread_mutex_unlock(&mMutex);
1200    */
1201    return;
1202}
1203
1204/*===========================================================================
1205 * FUNCTION   : dump
1206 *
1207 * DESCRIPTION:
1208 *
1209 * PARAMETERS :
1210 *
1211 *
1212 * RETURN     :
1213 *==========================================================================*/
1214void QCamera3HardwareInterface::dump(int /*fd*/)
1215{
1216    /*Enable lock when we implement this function*/
1217    /*
1218    pthread_mutex_lock(&mMutex);
1219
1220    pthread_mutex_unlock(&mMutex);
1221    */
1222    return;
1223}
1224
1225/*===========================================================================
1226 * FUNCTION   : flush
1227 *
1228 * DESCRIPTION:
1229 *
1230 * PARAMETERS :
1231 *
1232 *
1233 * RETURN     :
1234 *==========================================================================*/
1235int QCamera3HardwareInterface::flush()
1236{
1237    /*Enable lock when we implement this function*/
1238    /*
1239    pthread_mutex_lock(&mMutex);
1240
1241    pthread_mutex_unlock(&mMutex);
1242    */
1243    return 0;
1244}
1245
1246/*===========================================================================
1247 * FUNCTION   : captureResultCb
1248 *
1249 * DESCRIPTION: Callback handler for all capture result
1250 *              (streams, as well as metadata)
1251 *
1252 * PARAMETERS :
1253 *   @metadata : metadata information
1254 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1255 *               NULL if metadata.
1256 *
1257 * RETURN     : NONE
1258 *==========================================================================*/
1259void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1260                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1261{
1262    pthread_mutex_lock(&mMutex);
1263
1264    if (metadata_buf) {
1265        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1266        int32_t frame_number_valid = *(int32_t *)
1267            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1268        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1269            CAM_INTF_META_PENDING_REQUESTS, metadata);
1270        uint32_t frame_number = *(uint32_t *)
1271            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1272        const struct timeval *tv = (const struct timeval *)
1273            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1274        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1275            tv->tv_usec * NSEC_PER_USEC;
1276
1277        if (!frame_number_valid) {
1278            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1279            mMetadataChannel->bufDone(metadata_buf);
1280            goto done_metadata;
1281        }
1282        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1283                frame_number, capture_time);
1284
1285        // Go through the pending requests info and send shutter/results to frameworks
1286        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1287                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1288            camera3_capture_result_t result;
1289            camera3_notify_msg_t notify_msg;
1290            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1291
1292            // Flush out all entries with less or equal frame numbers.
1293
1294            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1295            //Right now it's the same as metadata timestamp
1296
1297            //TODO: When there is metadata drop, how do we derive the timestamp of
1298            //dropped frames? For now, we fake the dropped timestamp by substracting
1299            //from the reported timestamp
1300            nsecs_t current_capture_time = capture_time -
1301                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1302
1303            // Send shutter notify to frameworks
1304            notify_msg.type = CAMERA3_MSG_SHUTTER;
1305            notify_msg.message.shutter.frame_number = i->frame_number;
1306            notify_msg.message.shutter.timestamp = current_capture_time;
1307            mCallbackOps->notify(mCallbackOps, &notify_msg);
1308            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1309                    i->frame_number, capture_time);
1310
1311            // Send empty metadata with already filled buffers for dropped metadata
1312            // and send valid metadata with already filled buffers for current metadata
1313            if (i->frame_number < frame_number) {
1314                CameraMetadata dummyMetadata;
1315                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1316                        &current_capture_time, 1);
1317                dummyMetadata.update(ANDROID_REQUEST_ID,
1318                        &(i->request_id), 1);
1319                result.result = dummyMetadata.release();
1320            } else {
1321                result.result = translateCbMetadataToResultMetadata(metadata,
1322                        current_capture_time, i->request_id);
1323                if (mIsZslMode) {
1324                   int found_metadata = 0;
1325                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1326                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1327                        j != i->buffers.end(); j++) {
1328                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1329                         //check if corresp. zsl already exists in the stored metadata list
1330                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1331                               m != mStoredMetadataList.begin(); m++) {
1332                            if (m->frame_number == frame_number) {
1333                               m->meta_buf = metadata_buf;
1334                               found_metadata = 1;
1335                               break;
1336                            }
1337                         }
1338                         if (!found_metadata) {
1339                            MetadataBufferInfo store_meta_info;
1340                            store_meta_info.meta_buf = metadata_buf;
1341                            store_meta_info.frame_number = frame_number;
1342                            mStoredMetadataList.push_back(store_meta_info);
1343                            found_metadata = 1;
1344                         }
1345                      }
1346                   }
1347                   if (!found_metadata) {
1348                       if (!i->input_buffer_present && i->blob_request) {
1349                          //livesnapshot or fallback non-zsl snapshot case
1350                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1351                                j != i->buffers.end(); j++){
1352                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1353                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1354                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1355                                 break;
1356                              }
1357                         }
1358                       } else {
1359                            //return the metadata immediately
1360                            mMetadataChannel->bufDone(metadata_buf);
1361                            free(metadata_buf);
1362                       }
1363                   }
1364               } else if (!mIsZslMode && i->blob_request) {
1365                   //If it is a blob request then send the metadata to the picture channel
1366                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1367               } else {
1368                   // Return metadata buffer
1369                   mMetadataChannel->bufDone(metadata_buf);
1370                   free(metadata_buf);
1371               }
1372
1373            }
1374            if (!result.result) {
1375                ALOGE("%s: metadata is NULL", __func__);
1376            }
1377            result.frame_number = i->frame_number;
1378            result.num_output_buffers = 0;
1379            result.output_buffers = NULL;
1380            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1381                    j != i->buffers.end(); j++) {
1382                if (j->buffer) {
1383                    result.num_output_buffers++;
1384                }
1385            }
1386
1387            if (result.num_output_buffers > 0) {
1388                camera3_stream_buffer_t *result_buffers =
1389                    new camera3_stream_buffer_t[result.num_output_buffers];
1390                if (!result_buffers) {
1391                    ALOGE("%s: Fatal error: out of memory", __func__);
1392                }
1393                size_t result_buffers_idx = 0;
1394                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1395                        j != i->buffers.end(); j++) {
1396                    if (j->buffer) {
1397                        result_buffers[result_buffers_idx++] = *(j->buffer);
1398                        free(j->buffer);
1399                        j->buffer = NULL;
1400                        mPendingBuffersMap.editValueFor(j->stream)--;
1401                    }
1402                }
1403                result.output_buffers = result_buffers;
1404
1405                mCallbackOps->process_capture_result(mCallbackOps, &result);
1406                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1407                        __func__, result.frame_number, current_capture_time);
1408                free_camera_metadata((camera_metadata_t *)result.result);
1409                delete[] result_buffers;
1410            } else {
1411                mCallbackOps->process_capture_result(mCallbackOps, &result);
1412                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1413                        __func__, result.frame_number, current_capture_time);
1414                free_camera_metadata((camera_metadata_t *)result.result);
1415            }
1416            // erase the element from the list
1417            i = mPendingRequestsList.erase(i);
1418        }
1419
1420
1421done_metadata:
1422        bool max_buffers_dequeued = false;
1423        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1424            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1425            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1426            if (queued_buffers == stream->max_buffers) {
1427                max_buffers_dequeued = true;
1428                break;
1429            }
1430        }
1431        if (!max_buffers_dequeued && !pending_requests) {
1432            // Unblock process_capture_request
1433            mPendingRequest = 0;
1434            pthread_cond_signal(&mRequestCond);
1435        }
1436    } else {
1437        // If the frame number doesn't exist in the pending request list,
1438        // directly send the buffer to the frameworks, and update pending buffers map
1439        // Otherwise, book-keep the buffer.
1440        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1441        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1442            i++;
1443        }
1444        if (i == mPendingRequestsList.end()) {
1445            // Verify all pending requests frame_numbers are greater
1446            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1447                    j != mPendingRequestsList.end(); j++) {
1448                if (j->frame_number < frame_number) {
1449                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1450                            __func__, j->frame_number, frame_number);
1451                }
1452            }
1453            camera3_capture_result_t result;
1454            result.result = NULL;
1455            result.frame_number = frame_number;
1456            result.num_output_buffers = 1;
1457            result.output_buffers = buffer;
1458            ALOGV("%s: result frame_number = %d, buffer = %p",
1459                    __func__, frame_number, buffer);
1460            mPendingBuffersMap.editValueFor(buffer->stream)--;
1461            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1462                int found = 0;
1463                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1464                      k != mStoredMetadataList.end(); k++) {
1465                    if (k->frame_number == frame_number) {
1466                        k->zsl_buf_hdl = buffer->buffer;
1467                        found = 1;
1468                        break;
1469                    }
1470                }
1471                if (!found) {
1472                   MetadataBufferInfo meta_info;
1473                   meta_info.frame_number = frame_number;
1474                   meta_info.zsl_buf_hdl = buffer->buffer;
1475                   mStoredMetadataList.push_back(meta_info);
1476                }
1477            }
1478            mCallbackOps->process_capture_result(mCallbackOps, &result);
1479        } else {
1480            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1481                    j != i->buffers.end(); j++) {
1482                if (j->stream == buffer->stream) {
1483                    if (j->buffer != NULL) {
1484                        ALOGE("%s: Error: buffer is already set", __func__);
1485                    } else {
1486                        j->buffer = (camera3_stream_buffer_t *)malloc(
1487                                sizeof(camera3_stream_buffer_t));
1488                        *(j->buffer) = *buffer;
1489                        ALOGV("%s: cache buffer %p at result frame_number %d",
1490                                __func__, buffer, frame_number);
1491                    }
1492                }
1493            }
1494        }
1495    }
1496    pthread_mutex_unlock(&mMutex);
1497    return;
1498}
1499
1500/*===========================================================================
1501 * FUNCTION   : translateCbMetadataToResultMetadata
1502 *
1503 * DESCRIPTION:
1504 *
1505 * PARAMETERS :
1506 *   @metadata : metadata information from callback
1507 *
1508 * RETURN     : camera_metadata_t*
1509 *              metadata in a format specified by fwk
1510 *==========================================================================*/
1511camera_metadata_t*
1512QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1513                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1514                                 int32_t request_id)
1515{
1516    CameraMetadata camMetadata;
1517    camera_metadata_t* resultMetadata;
1518
1519    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1520    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1521
1522    /*CAM_INTF_META_HISTOGRAM - TODO*/
1523    /*cam_hist_stats_t  *histogram =
1524      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1525      metadata);*/
1526
1527    /*face detection*/
1528    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1529        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1530    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1531    int32_t faceIds[numFaces];
1532    uint8_t faceScores[numFaces];
1533    int32_t faceRectangles[numFaces * 4];
1534    int32_t faceLandmarks[numFaces * 6];
1535    int j = 0, k = 0;
1536    for (int i = 0; i < numFaces; i++) {
1537        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1538        faceScores[i] = faceDetectionInfo->faces[i].score;
1539        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1540                faceRectangles+j, -1);
1541        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1542        j+= 4;
1543        k+= 6;
1544    }
1545    if (numFaces > 0) {
1546        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1547        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1548        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1549            faceRectangles, numFaces*4);
1550        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1551            faceLandmarks, numFaces*6);
1552    }
1553
1554    uint8_t  *color_correct_mode =
1555        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1556    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1557
1558    int32_t  *ae_precapture_id =
1559        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1560    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1561
1562    /*aec regions*/
1563    cam_area_t  *hAeRegions =
1564        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1565    int32_t aeRegions[5];
1566    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1567    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1568
1569    uint8_t *ae_state =
1570            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1571    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1572
1573    uint8_t  *focusMode =
1574        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1575    uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1576            sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1577    camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1578
1579    /*af regions*/
1580    cam_area_t  *hAfRegions =
1581        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1582    int32_t afRegions[5];
1583    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1584    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1585
1586    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1587    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1588
1589    int32_t  *afTriggerId =
1590        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1591    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1592
1593    uint8_t  *whiteBalance =
1594        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1595    uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1596        sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1597        *whiteBalance);
1598    camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1599
1600    /*awb regions*/
1601    cam_area_t  *hAwbRegions =
1602        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1603    int32_t awbRegions[5];
1604    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1605    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1606
1607    uint8_t  *whiteBalanceState =
1608        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1609    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1610
1611    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1612    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1613
1614    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1615    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1616
1617    uint8_t  *flashPower =
1618        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1619    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1620
1621    int64_t  *flashFiringTime =
1622        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1623    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1624
1625    /*int32_t  *ledMode =
1626      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1627      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1628
1629    uint8_t  *flashState =
1630        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1631    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1632
1633    uint8_t  *hotPixelMode =
1634        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1635    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1636
1637    float  *lensAperture =
1638        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1639    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1640
1641    float  *filterDensity =
1642        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1643    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1644
1645    float  *focalLength =
1646        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1647    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1648
1649    float  *focusDistance =
1650        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1651    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1652
1653    float  *focusRange =
1654        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1655    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1656
1657    uint8_t  *opticalStab =
1658        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1659    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1660
1661    /*int32_t  *focusState =
1662      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1663      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1664
1665    uint8_t  *noiseRedMode =
1666        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1667    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1668
1669    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1670
1671    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1672        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1673    int32_t scalerCropRegion[4];
1674    scalerCropRegion[0] = hScalerCropRegion->left;
1675    scalerCropRegion[1] = hScalerCropRegion->top;
1676    scalerCropRegion[2] = hScalerCropRegion->width;
1677    scalerCropRegion[3] = hScalerCropRegion->height;
1678    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1679
1680    int64_t  *sensorExpTime =
1681        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1682    mMetadataResponse.exposure_time = *sensorExpTime;
1683    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1684    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1685
1686    int64_t  *sensorFameDuration =
1687        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1688    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1689    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1690
1691    int32_t  *sensorSensitivity =
1692        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1693    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1694    mMetadataResponse.iso_speed = *sensorSensitivity;
1695    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1696
1697    uint8_t  *shadingMode =
1698        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1699    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1700
1701    uint8_t  *faceDetectMode =
1702        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1703    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1704        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1705        *faceDetectMode);
1706    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1707
1708    uint8_t  *histogramMode =
1709        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1710    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1711
1712    uint8_t  *sharpnessMapMode =
1713        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1714    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1715            sharpnessMapMode, 1);
1716
1717    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1718    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1719        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1720    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1721            (int32_t*)sharpnessMap->sharpness,
1722            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1723
1724    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1725        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1726    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1727    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1728    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1729                       (float*)lensShadingMap->lens_shading,
1730                       4*map_width*map_height);
1731
1732    //Populate CAM_INTF_META_TONEMAP_CURVES
1733    /* ch0 = G, ch 1 = B, ch 2 = R*/
1734    cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1735        POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1736    camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1737                       (float*)tonemap->curves[0].tonemap_points,
1738                       tonemap->tonemap_points_cnt * 2);
1739
1740    camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1741                       (float*)tonemap->curves[1].tonemap_points,
1742                       tonemap->tonemap_points_cnt * 2);
1743
1744    camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1745                       (float*)tonemap->curves[2].tonemap_points,
1746                       tonemap->tonemap_points_cnt * 2);
1747
1748    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1749        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1750    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1751
1752    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1753        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1754    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1755                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1756
1757    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1758        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1759    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1760                       predColorCorrectionGains->gains, 4);
1761
1762    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1763        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1764    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1765                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1766
1767    uint8_t *blackLevelLock = (uint8_t*)
1768        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1769    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1770
1771    uint8_t *sceneFlicker = (uint8_t*)
1772        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1773    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1774
1775
1776    resultMetadata = camMetadata.release();
1777    return resultMetadata;
1778}
1779
1780/*===========================================================================
1781 * FUNCTION   : convertToRegions
1782 *
1783 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1784 *
1785 * PARAMETERS :
1786 *   @rect   : cam_rect_t struct to convert
1787 *   @region : int32_t destination array
1788 *   @weight : if we are converting from cam_area_t, weight is valid
1789 *             else weight = -1
1790 *
1791 *==========================================================================*/
1792void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1793    region[0] = rect.left;
1794    region[1] = rect.top;
1795    region[2] = rect.left + rect.width;
1796    region[3] = rect.top + rect.height;
1797    if (weight > -1) {
1798        region[4] = weight;
1799    }
1800}
1801
1802/*===========================================================================
1803 * FUNCTION   : convertFromRegions
1804 *
1805 * DESCRIPTION: helper method to convert from array to cam_rect_t
1806 *
1807 * PARAMETERS :
1808 *   @rect   : cam_rect_t struct to convert
1809 *   @region : int32_t destination array
1810 *   @weight : if we are converting from cam_area_t, weight is valid
1811 *             else weight = -1
1812 *
1813 *==========================================================================*/
1814void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1815                                                   const camera_metadata_t *settings,
1816                                                   uint32_t tag){
1817    CameraMetadata frame_settings;
1818    frame_settings = settings;
1819    int32_t x_min = frame_settings.find(tag).data.i32[0];
1820    int32_t y_min = frame_settings.find(tag).data.i32[1];
1821    int32_t x_max = frame_settings.find(tag).data.i32[2];
1822    int32_t y_max = frame_settings.find(tag).data.i32[3];
1823    roi->weight = frame_settings.find(tag).data.i32[4];
1824    roi->rect.left = x_min;
1825    roi->rect.top = y_min;
1826    roi->rect.width = x_max - x_min;
1827    roi->rect.height = y_max - y_min;
1828}
1829
1830/*===========================================================================
1831 * FUNCTION   : resetIfNeededROI
1832 *
1833 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1834 *              crop region
1835 *
1836 * PARAMETERS :
1837 *   @roi       : cam_area_t struct to resize
1838 *   @scalerCropRegion : cam_crop_region_t region to compare against
1839 *
1840 *
1841 *==========================================================================*/
1842bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1843                                                 const cam_crop_region_t* scalerCropRegion)
1844{
1845    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1846    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1847    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1848    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1849    if ((roi_x_max < scalerCropRegion->left) ||
1850        (roi_y_max < scalerCropRegion->top)  ||
1851        (roi->rect.left > crop_x_max) ||
1852        (roi->rect.top > crop_y_max)){
1853        return false;
1854    }
1855    if (roi->rect.left < scalerCropRegion->left) {
1856        roi->rect.left = scalerCropRegion->left;
1857    }
1858    if (roi->rect.top < scalerCropRegion->top) {
1859        roi->rect.top = scalerCropRegion->top;
1860    }
1861    if (roi_x_max > crop_x_max) {
1862        roi_x_max = crop_x_max;
1863    }
1864    if (roi_y_max > crop_y_max) {
1865        roi_y_max = crop_y_max;
1866    }
1867    roi->rect.width = roi_x_max - roi->rect.left;
1868    roi->rect.height = roi_y_max - roi->rect.top;
1869    return true;
1870}
1871
1872/*===========================================================================
1873 * FUNCTION   : convertLandmarks
1874 *
1875 * DESCRIPTION: helper method to extract the landmarks from face detection info
1876 *
1877 * PARAMETERS :
1878 *   @face   : cam_rect_t struct to convert
1879 *   @landmarks : int32_t destination array
1880 *
1881 *
1882 *==========================================================================*/
1883void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1884{
1885    landmarks[0] = face.left_eye_center.x;
1886    landmarks[1] = face.left_eye_center.y;
1887    landmarks[2] = face.right_eye_center.y;
1888    landmarks[3] = face.right_eye_center.y;
1889    landmarks[4] = face.mouth_center.x;
1890    landmarks[5] = face.mouth_center.y;
1891}
1892
1893#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1894/*===========================================================================
1895 * FUNCTION   : initCapabilities
1896 *
1897 * DESCRIPTION: initialize camera capabilities in static data struct
1898 *
1899 * PARAMETERS :
1900 *   @cameraId  : camera Id
1901 *
1902 * RETURN     : int32_t type of status
1903 *              NO_ERROR  -- success
1904 *              none-zero failure code
1905 *==========================================================================*/
1906int QCamera3HardwareInterface::initCapabilities(int cameraId)
1907{
1908    int rc = 0;
1909    mm_camera_vtbl_t *cameraHandle = NULL;
1910    QCamera3HeapMemory *capabilityHeap = NULL;
1911
1912    cameraHandle = camera_open(cameraId);
1913    if (!cameraHandle) {
1914        ALOGE("%s: camera_open failed", __func__);
1915        rc = -1;
1916        goto open_failed;
1917    }
1918
1919    capabilityHeap = new QCamera3HeapMemory();
1920    if (capabilityHeap == NULL) {
1921        ALOGE("%s: creation of capabilityHeap failed", __func__);
1922        goto heap_creation_failed;
1923    }
1924    /* Allocate memory for capability buffer */
1925    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1926    if(rc != OK) {
1927        ALOGE("%s: No memory for cappability", __func__);
1928        goto allocate_failed;
1929    }
1930
1931    /* Map memory for capability buffer */
1932    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1933    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1934                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1935                                capabilityHeap->getFd(0),
1936                                sizeof(cam_capability_t));
1937    if(rc < 0) {
1938        ALOGE("%s: failed to map capability buffer", __func__);
1939        goto map_failed;
1940    }
1941
1942    /* Query Capability */
1943    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1944    if(rc < 0) {
1945        ALOGE("%s: failed to query capability",__func__);
1946        goto query_failed;
1947    }
1948    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1949    if (!gCamCapability[cameraId]) {
1950        ALOGE("%s: out of memory", __func__);
1951        goto query_failed;
1952    }
1953    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1954                                        sizeof(cam_capability_t));
1955    rc = 0;
1956
1957query_failed:
1958    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1959                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1960map_failed:
1961    capabilityHeap->deallocate();
1962allocate_failed:
1963    delete capabilityHeap;
1964heap_creation_failed:
1965    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1966    cameraHandle = NULL;
1967open_failed:
1968    return rc;
1969}
1970
1971/*===========================================================================
1972 * FUNCTION   : initParameters
1973 *
1974 * DESCRIPTION: initialize camera parameters
1975 *
1976 * PARAMETERS :
1977 *
1978 * RETURN     : int32_t type of status
1979 *              NO_ERROR  -- success
1980 *              none-zero failure code
1981 *==========================================================================*/
1982int QCamera3HardwareInterface::initParameters()
1983{
1984    int rc = 0;
1985
1986    //Allocate Set Param Buffer
1987    mParamHeap = new QCamera3HeapMemory();
1988    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1989    if(rc != OK) {
1990        rc = NO_MEMORY;
1991        ALOGE("Failed to allocate SETPARM Heap memory");
1992        delete mParamHeap;
1993        mParamHeap = NULL;
1994        return rc;
1995    }
1996
1997    //Map memory for parameters buffer
1998    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1999            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2000            mParamHeap->getFd(0),
2001            sizeof(parm_buffer_t));
2002    if(rc < 0) {
2003        ALOGE("%s:failed to map SETPARM buffer",__func__);
2004        rc = FAILED_TRANSACTION;
2005        mParamHeap->deallocate();
2006        delete mParamHeap;
2007        mParamHeap = NULL;
2008        return rc;
2009    }
2010
2011    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2012    return rc;
2013}
2014
2015/*===========================================================================
2016 * FUNCTION   : deinitParameters
2017 *
2018 * DESCRIPTION: de-initialize camera parameters
2019 *
2020 * PARAMETERS :
2021 *
2022 * RETURN     : NONE
2023 *==========================================================================*/
2024void QCamera3HardwareInterface::deinitParameters()
2025{
2026    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2027            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2028
2029    mParamHeap->deallocate();
2030    delete mParamHeap;
2031    mParamHeap = NULL;
2032
2033    mParameters = NULL;
2034}
2035
2036/*===========================================================================
2037 * FUNCTION   : calcMaxJpegSize
2038 *
2039 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2040 *
2041 * PARAMETERS :
2042 *
2043 * RETURN     : max_jpeg_size
2044 *==========================================================================*/
2045int QCamera3HardwareInterface::calcMaxJpegSize()
2046{
2047    int32_t max_jpeg_size = 0;
2048    int temp_width, temp_height;
2049    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2050        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2051        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2052        if (temp_width * temp_height > max_jpeg_size ) {
2053            max_jpeg_size = temp_width * temp_height;
2054        }
2055    }
2056    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2057    return max_jpeg_size;
2058}
2059
2060/*===========================================================================
2061 * FUNCTION   : initStaticMetadata
2062 *
2063 * DESCRIPTION: initialize the static metadata
2064 *
2065 * PARAMETERS :
2066 *   @cameraId  : camera Id
2067 *
2068 * RETURN     : int32_t type of status
2069 *              0  -- success
2070 *              non-zero failure code
2071 *==========================================================================*/
2072int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2073{
2074    int rc = 0;
2075    CameraMetadata staticInfo;
2076
2077    /* android.info: hardware level */
2078    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2079    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2080        &supportedHardwareLevel, 1);
2081
2082    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2083    /*HAL 3 only*/
2084    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2085                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2086
2087    /*hard coded for now but this should come from sensor*/
2088    float min_focus_distance;
2089    if(facingBack){
2090        min_focus_distance = 10;
2091    } else {
2092        min_focus_distance = 0;
2093    }
2094    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2095                    &min_focus_distance, 1);
2096
2097    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2098                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2099
2100    /*should be using focal lengths but sensor doesn't provide that info now*/
2101    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2102                      &gCamCapability[cameraId]->focal_length,
2103                      1);
2104
2105    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2106                      gCamCapability[cameraId]->apertures,
2107                      gCamCapability[cameraId]->apertures_count);
2108
2109    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2110                gCamCapability[cameraId]->filter_densities,
2111                gCamCapability[cameraId]->filter_densities_count);
2112
2113
2114    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2115                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2116                      gCamCapability[cameraId]->optical_stab_modes_count);
2117
2118    staticInfo.update(ANDROID_LENS_POSITION,
2119                      gCamCapability[cameraId]->lens_position,
2120                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2121
2122    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2123                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2124    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2125                      lens_shading_map_size,
2126                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2127
2128    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2129                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2130    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2131            geo_correction_map_size,
2132            sizeof(geo_correction_map_size)/sizeof(int32_t));
2133
2134    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2135                       gCamCapability[cameraId]->geo_correction_map,
2136                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2137
2138    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2139            gCamCapability[cameraId]->sensor_physical_size, 2);
2140
2141    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2142            gCamCapability[cameraId]->exposure_time_range, 2);
2143
2144    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2145            &gCamCapability[cameraId]->max_frame_duration, 1);
2146
2147
2148    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2149                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2150
2151    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2152                                               gCamCapability[cameraId]->pixel_array_size.height};
2153    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2154                      pixel_array_size, 2);
2155
2156    int32_t active_array_size[] = {0, 0,
2157                                                gCamCapability[cameraId]->active_array_size.width,
2158                                                gCamCapability[cameraId]->active_array_size.height};
2159    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2160                      active_array_size, 4);
2161
2162    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2163            &gCamCapability[cameraId]->white_level, 1);
2164
2165    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2166            gCamCapability[cameraId]->black_level_pattern, 4);
2167
2168    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2169                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2170
2171    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2172                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2173
2174    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2175                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2176
2177    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2178                      &gCamCapability[cameraId]->histogram_size, 1);
2179
2180    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2181            &gCamCapability[cameraId]->max_histogram_count, 1);
2182
2183    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2184                                                gCamCapability[cameraId]->sharpness_map_size.height};
2185
2186    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2187            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2188
2189    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2190            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2191
2192
2193    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2194                      &gCamCapability[cameraId]->raw_min_duration,
2195                       1);
2196
2197    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2198                                                HAL_PIXEL_FORMAT_BLOB};
2199    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2200    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2201                      scalar_formats,
2202                      scalar_formats_count);
2203
2204    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2205    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2206              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2207              available_processed_sizes);
2208    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2209                available_processed_sizes,
2210                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2211
2212    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2213                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2214                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2215
2216    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2217    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2218                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2219                 available_fps_ranges);
2220    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2221            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2222
2223    camera_metadata_rational exposureCompensationStep = {
2224            gCamCapability[cameraId]->exp_compensation_step.numerator,
2225            gCamCapability[cameraId]->exp_compensation_step.denominator};
2226    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2227                      &exposureCompensationStep, 1);
2228
2229    /*TO DO*/
2230    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2231    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2232                      availableVstabModes, sizeof(availableVstabModes));
2233
2234    /*HAL 1 and HAL 3 common*/
2235    float maxZoom = 4;
2236    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2237            &maxZoom, 1);
2238
2239    int32_t max3aRegions = 1;
2240    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2241            &max3aRegions, 1);
2242
2243    uint8_t availableFaceDetectModes[] = {
2244            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2245            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2246    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2247                      availableFaceDetectModes,
2248                      sizeof(availableFaceDetectModes));
2249
2250    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2251                                       gCamCapability[cameraId]->raw_dim.height};
2252    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2253                      raw_size,
2254                      sizeof(raw_size)/sizeof(uint32_t));
2255
2256    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2257                                                        gCamCapability[cameraId]->exposure_compensation_max};
2258    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2259            exposureCompensationRange,
2260            sizeof(exposureCompensationRange)/sizeof(int32_t));
2261
2262    uint8_t lensFacing = (facingBack) ?
2263            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2264    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2265
2266    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2267                available_processed_sizes,
2268                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2269
2270    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2271                      available_thumbnail_sizes,
2272                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2273
2274    int32_t max_jpeg_size = 0;
2275    int temp_width, temp_height;
2276    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2277        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2278        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2279        if (temp_width * temp_height > max_jpeg_size ) {
2280            max_jpeg_size = temp_width * temp_height;
2281        }
2282    }
2283    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2284    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2285                      &max_jpeg_size, 1);
2286
2287    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2288    int32_t size = 0;
2289    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2290        int val = lookupFwkName(EFFECT_MODES_MAP,
2291                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2292                                   gCamCapability[cameraId]->supported_effects[i]);
2293        if (val != NAME_NOT_FOUND) {
2294            avail_effects[size] = (uint8_t)val;
2295            size++;
2296        }
2297    }
2298    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2299                      avail_effects,
2300                      size);
2301
2302    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2303    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2304    int32_t supported_scene_modes_cnt = 0;
2305    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2306        int val = lookupFwkName(SCENE_MODES_MAP,
2307                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2308                                gCamCapability[cameraId]->supported_scene_modes[i]);
2309        if (val != NAME_NOT_FOUND) {
2310            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2311            supported_indexes[supported_scene_modes_cnt] = i;
2312            supported_scene_modes_cnt++;
2313        }
2314    }
2315
2316    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2317                      avail_scene_modes,
2318                      supported_scene_modes_cnt);
2319
2320    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2321    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2322                      supported_scene_modes_cnt,
2323                      scene_mode_overrides,
2324                      supported_indexes,
2325                      cameraId);
2326    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2327                      scene_mode_overrides,
2328                      supported_scene_modes_cnt*3);
2329
2330    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2331    size = 0;
2332    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2333        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2334                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2335                                 gCamCapability[cameraId]->supported_antibandings[i]);
2336        if (val != NAME_NOT_FOUND) {
2337            avail_antibanding_modes[size] = (uint8_t)val;
2338            size++;
2339        }
2340
2341    }
2342    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2343                      avail_antibanding_modes,
2344                      size);
2345
2346    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2347    size = 0;
2348    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2349        int val = lookupFwkName(FOCUS_MODES_MAP,
2350                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2351                                gCamCapability[cameraId]->supported_focus_modes[i]);
2352        if (val != NAME_NOT_FOUND) {
2353            avail_af_modes[size] = (uint8_t)val;
2354            size++;
2355        }
2356    }
2357    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2358                      avail_af_modes,
2359                      size);
2360
2361    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2362    size = 0;
2363    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2364        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2365                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2366                                    gCamCapability[cameraId]->supported_white_balances[i]);
2367        if (val != NAME_NOT_FOUND) {
2368            avail_awb_modes[size] = (uint8_t)val;
2369            size++;
2370        }
2371    }
2372    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2373                      avail_awb_modes,
2374                      size);
2375
2376    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2377    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2378      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2379
2380    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2381            available_flash_levels,
2382            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2383
2384
2385    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2386    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2387            &flashAvailable, 1);
2388
2389    uint8_t avail_ae_modes[5];
2390    size = 0;
2391    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2392        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2393        size++;
2394    }
2395    if (flashAvailable) {
2396        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2397        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2398        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2399    }
2400    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2401                      avail_ae_modes,
2402                      size);
2403
2404    int32_t sensitivity_range[2];
2405    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2406    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2407    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2408                      sensitivity_range,
2409                      sizeof(sensitivity_range) / sizeof(int32_t));
2410
2411    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2412                      &gCamCapability[cameraId]->max_analog_sensitivity,
2413                      1);
2414
2415    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2416                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2417                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2418
2419    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2420    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2421                      &sensor_orientation,
2422                      1);
2423
2424    int32_t max_output_streams[3] = {1, 3, 1};
2425    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2426                      max_output_streams,
2427                      3);
2428
2429    gStaticMetadata[cameraId] = staticInfo.release();
2430    return rc;
2431}
2432
2433/*===========================================================================
2434 * FUNCTION   : makeTable
2435 *
2436 * DESCRIPTION: make a table of sizes
2437 *
2438 * PARAMETERS :
2439 *
2440 *
2441 *==========================================================================*/
2442void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2443                                          int32_t* sizeTable)
2444{
2445    int j = 0;
2446    for (int i = 0; i < size; i++) {
2447        sizeTable[j] = dimTable[i].width;
2448        sizeTable[j+1] = dimTable[i].height;
2449        j+=2;
2450    }
2451}
2452
2453/*===========================================================================
2454 * FUNCTION   : makeFPSTable
2455 *
2456 * DESCRIPTION: make a table of fps ranges
2457 *
2458 * PARAMETERS :
2459 *
2460 *==========================================================================*/
2461void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2462                                          int32_t* fpsRangesTable)
2463{
2464    int j = 0;
2465    for (int i = 0; i < size; i++) {
2466        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2467        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2468        j+=2;
2469    }
2470}
2471
2472/*===========================================================================
2473 * FUNCTION   : makeOverridesList
2474 *
2475 * DESCRIPTION: make a list of scene mode overrides
2476 *
2477 * PARAMETERS :
2478 *
2479 *
2480 *==========================================================================*/
2481void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2482                                                  uint8_t size, uint8_t* overridesList,
2483                                                  uint8_t* supported_indexes,
2484                                                  int camera_id)
2485{
2486    /*daemon will give a list of overrides for all scene modes.
2487      However we should send the fwk only the overrides for the scene modes
2488      supported by the framework*/
2489    int j = 0, index = 0, supt = 0;
2490    uint8_t focus_override;
2491    for (int i = 0; i < size; i++) {
2492        supt = 0;
2493        index = supported_indexes[i];
2494        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2495        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2496                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2497                                                    overridesTable[index].awb_mode);
2498        focus_override = (uint8_t)overridesTable[index].af_mode;
2499        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2500           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2501              supt = 1;
2502              break;
2503           }
2504        }
2505        if (supt) {
2506           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2507                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2508                                              focus_override);
2509        } else {
2510           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2511        }
2512        j+=3;
2513    }
2514}
2515
2516/*===========================================================================
2517 * FUNCTION   : getPreviewHalPixelFormat
2518 *
2519 * DESCRIPTION: convert the format to type recognized by framework
2520 *
2521 * PARAMETERS : format : the format from backend
2522 *
2523 ** RETURN    : format recognized by framework
2524 *
2525 *==========================================================================*/
2526int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2527{
2528    int32_t halPixelFormat;
2529
2530    switch (format) {
2531    case CAM_FORMAT_YUV_420_NV12:
2532        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2533        break;
2534    case CAM_FORMAT_YUV_420_NV21:
2535        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2536        break;
2537    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2538        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2539        break;
2540    case CAM_FORMAT_YUV_420_YV12:
2541        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2542        break;
2543    case CAM_FORMAT_YUV_422_NV16:
2544    case CAM_FORMAT_YUV_422_NV61:
2545    default:
2546        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2547        break;
2548    }
2549    return halPixelFormat;
2550}
2551
2552/*===========================================================================
2553 * FUNCTION   : getSensorSensitivity
2554 *
2555 * DESCRIPTION: convert iso_mode to an integer value
2556 *
2557 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2558 *
2559 ** RETURN    : sensitivity supported by sensor
2560 *
2561 *==========================================================================*/
2562int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2563{
2564    int32_t sensitivity;
2565
2566    switch (iso_mode) {
2567    case CAM_ISO_MODE_100:
2568        sensitivity = 100;
2569        break;
2570    case CAM_ISO_MODE_200:
2571        sensitivity = 200;
2572        break;
2573    case CAM_ISO_MODE_400:
2574        sensitivity = 400;
2575        break;
2576    case CAM_ISO_MODE_800:
2577        sensitivity = 800;
2578        break;
2579    case CAM_ISO_MODE_1600:
2580        sensitivity = 1600;
2581        break;
2582    default:
2583        sensitivity = -1;
2584        break;
2585    }
2586    return sensitivity;
2587}
2588
2589
2590/*===========================================================================
2591 * FUNCTION   : AddSetParmEntryToBatch
2592 *
2593 * DESCRIPTION: add set parameter entry into batch
2594 *
2595 * PARAMETERS :
2596 *   @p_table     : ptr to parameter buffer
2597 *   @paramType   : parameter type
2598 *   @paramLength : length of parameter value
2599 *   @paramValue  : ptr to parameter value
2600 *
2601 * RETURN     : int32_t type of status
2602 *              NO_ERROR  -- success
2603 *              none-zero failure code
2604 *==========================================================================*/
2605int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2606                                                          cam_intf_parm_type_t paramType,
2607                                                          uint32_t paramLength,
2608                                                          void *paramValue)
2609{
2610    int position = paramType;
2611    int current, next;
2612
2613    /*************************************************************************
2614    *                 Code to take care of linking next flags                *
2615    *************************************************************************/
2616    current = GET_FIRST_PARAM_ID(p_table);
2617    if (position == current){
2618        //DO NOTHING
2619    } else if (position < current){
2620        SET_NEXT_PARAM_ID(position, p_table, current);
2621        SET_FIRST_PARAM_ID(p_table, position);
2622    } else {
2623        /* Search for the position in the linked list where we need to slot in*/
2624        while (position > GET_NEXT_PARAM_ID(current, p_table))
2625            current = GET_NEXT_PARAM_ID(current, p_table);
2626
2627        /*If node already exists no need to alter linking*/
2628        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2629            next = GET_NEXT_PARAM_ID(current, p_table);
2630            SET_NEXT_PARAM_ID(current, p_table, position);
2631            SET_NEXT_PARAM_ID(position, p_table, next);
2632        }
2633    }
2634
2635    /*************************************************************************
2636    *                   Copy contents into entry                             *
2637    *************************************************************************/
2638
2639    if (paramLength > sizeof(parm_type_t)) {
2640        ALOGE("%s:Size of input larger than max entry size",__func__);
2641        return BAD_VALUE;
2642    }
2643    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2644    return NO_ERROR;
2645}
2646
2647/*===========================================================================
2648 * FUNCTION   : lookupFwkName
2649 *
2650 * DESCRIPTION: In case the enum is not same in fwk and backend
2651 *              make sure the parameter is correctly propogated
2652 *
2653 * PARAMETERS  :
2654 *   @arr      : map between the two enums
2655 *   @len      : len of the map
2656 *   @hal_name : name of the hal_parm to map
2657 *
2658 * RETURN     : int type of status
2659 *              fwk_name  -- success
2660 *              none-zero failure code
2661 *==========================================================================*/
2662int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2663                                             int len, int hal_name)
2664{
2665
2666    for (int i = 0; i < len; i++) {
2667        if (arr[i].hal_name == hal_name)
2668            return arr[i].fwk_name;
2669    }
2670
2671    /* Not able to find matching framework type is not necessarily
2672     * an error case. This happens when mm-camera supports more attributes
2673     * than the frameworks do */
2674    ALOGD("%s: Cannot find matching framework type", __func__);
2675    return NAME_NOT_FOUND;
2676}
2677
2678/*===========================================================================
2679 * FUNCTION   : lookupHalName
2680 *
2681 * DESCRIPTION: In case the enum is not same in fwk and backend
2682 *              make sure the parameter is correctly propogated
2683 *
2684 * PARAMETERS  :
2685 *   @arr      : map between the two enums
2686 *   @len      : len of the map
2687 *   @fwk_name : name of the hal_parm to map
2688 *
2689 * RETURN     : int32_t type of status
2690 *              hal_name  -- success
2691 *              none-zero failure code
2692 *==========================================================================*/
2693int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2694                                             int len, int fwk_name)
2695{
2696    for (int i = 0; i < len; i++) {
2697       if (arr[i].fwk_name == fwk_name)
2698           return arr[i].hal_name;
2699    }
2700    ALOGE("%s: Cannot find matching hal type", __func__);
2701    return NAME_NOT_FOUND;
2702}
2703
2704/*===========================================================================
2705 * FUNCTION   : getCapabilities
2706 *
2707 * DESCRIPTION: query camera capabilities
2708 *
2709 * PARAMETERS :
2710 *   @cameraId  : camera Id
2711 *   @info      : camera info struct to be filled in with camera capabilities
2712 *
2713 * RETURN     : int32_t type of status
2714 *              NO_ERROR  -- success
2715 *              none-zero failure code
2716 *==========================================================================*/
2717int QCamera3HardwareInterface::getCamInfo(int cameraId,
2718                                    struct camera_info *info)
2719{
2720    int rc = 0;
2721
2722    if (NULL == gCamCapability[cameraId]) {
2723        rc = initCapabilities(cameraId);
2724        if (rc < 0) {
2725            //pthread_mutex_unlock(&g_camlock);
2726            return rc;
2727        }
2728    }
2729
2730    if (NULL == gStaticMetadata[cameraId]) {
2731        rc = initStaticMetadata(cameraId);
2732        if (rc < 0) {
2733            return rc;
2734        }
2735    }
2736
2737    switch(gCamCapability[cameraId]->position) {
2738    case CAM_POSITION_BACK:
2739        info->facing = CAMERA_FACING_BACK;
2740        break;
2741
2742    case CAM_POSITION_FRONT:
2743        info->facing = CAMERA_FACING_FRONT;
2744        break;
2745
2746    default:
2747        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2748        rc = -1;
2749        break;
2750    }
2751
2752
2753    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2754    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2755    info->static_camera_characteristics = gStaticMetadata[cameraId];
2756
2757    return rc;
2758}
2759
2760/*===========================================================================
2761 * FUNCTION   : translateMetadata
2762 *
2763 * DESCRIPTION: translate the metadata into camera_metadata_t
2764 *
2765 * PARAMETERS : type of the request
2766 *
2767 *
2768 * RETURN     : success: camera_metadata_t*
2769 *              failure: NULL
2770 *
2771 *==========================================================================*/
2772camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2773{
2774    pthread_mutex_lock(&mMutex);
2775
2776    if (mDefaultMetadata[type] != NULL) {
2777        pthread_mutex_unlock(&mMutex);
2778        return mDefaultMetadata[type];
2779    }
2780    //first time we are handling this request
2781    //fill up the metadata structure using the wrapper class
2782    CameraMetadata settings;
2783    //translate from cam_capability_t to camera_metadata_tag_t
2784    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2785    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2786
2787    /*control*/
2788
2789    uint8_t controlIntent = 0;
2790    switch (type) {
2791      case CAMERA3_TEMPLATE_PREVIEW:
2792        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2793        break;
2794      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2795        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2796        break;
2797      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2798        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2799        break;
2800      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2801        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2802        break;
2803      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2804        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2805        break;
2806      default:
2807        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2808        break;
2809    }
2810    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2811
2812    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2813            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2814
2815    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2816    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2817
2818    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2819    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2820
2821    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2822    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2823
2824    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2825    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2826
2827    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2828    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2829
2830    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2831    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2832
2833    static uint8_t focusMode;
2834    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2835        ALOGE("%s: Setting focus mode to auto", __func__);
2836        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2837    } else {
2838        ALOGE("%s: Setting focus mode to off", __func__);
2839        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2840    }
2841    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2842
2843    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2844    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2845
2846    /*flash*/
2847    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2848    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2849
2850    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2851    settings.update(ANDROID_FLASH_FIRING_POWER,
2852            &flashFiringLevel, 1);
2853
2854    /* lens */
2855    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2856    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2857
2858    if (gCamCapability[mCameraId]->filter_densities_count) {
2859        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2860        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2861                        gCamCapability[mCameraId]->filter_densities_count);
2862    }
2863
2864    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2865    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2866
2867    /* frame duration */
2868    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2869    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2870
2871    /* sensitivity */
2872    static const int32_t default_sensitivity = 100;
2873    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2874
2875    /*edge mode*/
2876    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2877    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
2878
2879    /*noise reduction mode*/
2880    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
2881    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
2882
2883    /*color correction mode*/
2884    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
2885    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
2886
2887    /*transform matrix mode*/
2888    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
2889    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
2890
2891    mDefaultMetadata[type] = settings.release();
2892
2893    pthread_mutex_unlock(&mMutex);
2894    return mDefaultMetadata[type];
2895}
2896
2897/*===========================================================================
2898 * FUNCTION   : setFrameParameters
2899 *
2900 * DESCRIPTION: set parameters per frame as requested in the metadata from
2901 *              framework
2902 *
2903 * PARAMETERS :
2904 *   @request   : request that needs to be serviced
2905 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2906 *
2907 * RETURN     : success: NO_ERROR
2908 *              failure:
2909 *==========================================================================*/
2910int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2911                    uint32_t streamTypeMask)
2912{
2913    /*translate from camera_metadata_t type to parm_type_t*/
2914    int rc = 0;
2915    if (request->settings == NULL && mFirstRequest) {
2916        /*settings cannot be null for the first request*/
2917        return BAD_VALUE;
2918    }
2919
2920    int32_t hal_version = CAM_HAL_V3;
2921
2922    memset(mParameters, 0, sizeof(parm_buffer_t));
2923    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2924    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2925                sizeof(hal_version), &hal_version);
2926    if (rc < 0) {
2927        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2928        return BAD_VALUE;
2929    }
2930
2931    /*we need to update the frame number in the parameters*/
2932    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2933                                sizeof(request->frame_number), &(request->frame_number));
2934    if (rc < 0) {
2935        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2936        return BAD_VALUE;
2937    }
2938
2939    /* Update stream id mask where buffers are requested */
2940    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2941                                sizeof(streamTypeMask), &streamTypeMask);
2942    if (rc < 0) {
2943        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2944        return BAD_VALUE;
2945    }
2946
2947    if(request->settings != NULL){
2948        rc = translateMetadataToParameters(request);
2949    }
2950    /*set the parameters to backend*/
2951    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2952    return rc;
2953}
2954
2955/*===========================================================================
2956 * FUNCTION   : translateMetadataToParameters
2957 *
2958 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2959 *
2960 *
2961 * PARAMETERS :
2962 *   @request  : request sent from framework
2963 *
2964 *
2965 * RETURN     : success: NO_ERROR
2966 *              failure:
2967 *==========================================================================*/
2968int QCamera3HardwareInterface::translateMetadataToParameters
2969                                  (const camera3_capture_request_t *request)
2970{
2971    int rc = 0;
2972    CameraMetadata frame_settings;
2973    frame_settings = request->settings;
2974
2975    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2976        int32_t antibandingMode =
2977            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2978        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2979                sizeof(antibandingMode), &antibandingMode);
2980    }
2981
2982    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2983        int32_t expCompensation = frame_settings.find(
2984            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2985        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2986            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2987        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2988            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2989        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2990          sizeof(expCompensation), &expCompensation);
2991    }
2992
2993    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2994        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2995        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2996                sizeof(aeLock), &aeLock);
2997    }
2998    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2999        cam_fps_range_t fps_range;
3000        fps_range.min_fps =
3001            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3002        fps_range.max_fps =
3003            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3004        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3005                sizeof(fps_range), &fps_range);
3006    }
3007
3008    float focalDistance = -1.0;
3009    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3010        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3011        rc = AddSetParmEntryToBatch(mParameters,
3012                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3013                sizeof(focalDistance), &focalDistance);
3014    }
3015
3016    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3017        uint8_t fwk_focusMode =
3018            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3019        uint8_t focusMode;
3020        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3021            focusMode = CAM_FOCUS_MODE_INFINITY;
3022        } else{
3023         focusMode = lookupHalName(FOCUS_MODES_MAP,
3024                                   sizeof(FOCUS_MODES_MAP),
3025                                   fwk_focusMode);
3026        }
3027        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3028                sizeof(focusMode), &focusMode);
3029    }
3030
3031    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3032        uint8_t awbLock =
3033            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3034        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3035                sizeof(awbLock), &awbLock);
3036    }
3037
3038    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3039        uint8_t fwk_whiteLevel =
3040            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3041        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3042                sizeof(WHITE_BALANCE_MODES_MAP),
3043                fwk_whiteLevel);
3044        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3045                sizeof(whiteLevel), &whiteLevel);
3046    }
3047
3048    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3049        uint8_t fwk_effectMode =
3050            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3051        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3052                sizeof(EFFECT_MODES_MAP),
3053                fwk_effectMode);
3054        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3055                sizeof(effectMode), &effectMode);
3056    }
3057
3058    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3059        uint8_t fwk_aeMode =
3060            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3061        uint8_t aeMode;
3062        int32_t redeye;
3063
3064        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3065            aeMode = CAM_AE_MODE_OFF;
3066        } else {
3067            aeMode = CAM_AE_MODE_ON;
3068        }
3069        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3070            redeye = 1;
3071        } else {
3072            redeye = 0;
3073        }
3074
3075        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3076                                          sizeof(AE_FLASH_MODE_MAP),
3077                                          fwk_aeMode);
3078        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3079                sizeof(aeMode), &aeMode);
3080        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3081                sizeof(flashMode), &flashMode);
3082        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3083                sizeof(redeye), &redeye);
3084    }
3085
3086    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3087        uint8_t colorCorrectMode =
3088            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3089        rc =
3090            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3091                    sizeof(colorCorrectMode), &colorCorrectMode);
3092    }
3093
3094    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3095        cam_color_correct_gains_t colorCorrectGains;
3096        for (int i = 0; i < 4; i++) {
3097            colorCorrectGains.gains[i] =
3098                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3099        }
3100        rc =
3101            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3102                    sizeof(colorCorrectGains), &colorCorrectGains);
3103    }
3104
3105    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3106        cam_color_correct_matrix_t colorCorrectTransform;
3107        cam_rational_type_t transform_elem;
3108        int num = 0;
3109        for (int i = 0; i < 3; i++) {
3110           for (int j = 0; j < 3; j++) {
3111              transform_elem.numerator =
3112                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3113              transform_elem.denominator =
3114                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3115              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3116              num++;
3117           }
3118        }
3119        rc =
3120            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3121                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3122    }
3123
3124    cam_trigger_t aecTrigger;
3125    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3126    aecTrigger.trigger_id = -1;
3127    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3128        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3129        aecTrigger.trigger =
3130            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3131        aecTrigger.trigger_id =
3132            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3133    }
3134    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3135                                sizeof(aecTrigger), &aecTrigger);
3136
3137    /*af_trigger must come with a trigger id*/
3138    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3139        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3140        cam_trigger_t af_trigger;
3141        af_trigger.trigger =
3142            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3143        af_trigger.trigger_id =
3144            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3145        rc = AddSetParmEntryToBatch(mParameters,
3146                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3147    }
3148
3149    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3150        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3151        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3152                sizeof(metaMode), &metaMode);
3153        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3154           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3155           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3156                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3157                                             fwk_sceneMode);
3158           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3159                sizeof(sceneMode), &sceneMode);
3160        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3161           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3162           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3163                sizeof(sceneMode), &sceneMode);
3164        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3165           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3166           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3167                sizeof(sceneMode), &sceneMode);
3168        }
3169    }
3170
3171    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3172        int32_t demosaic =
3173            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3174        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3175                sizeof(demosaic), &demosaic);
3176    }
3177
3178    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3179        cam_edge_application_t edge_application;
3180        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3181        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3182            edge_application.sharpness = 0;
3183        } else {
3184            edge_application.sharpness = 10;
3185        }
3186        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3187                sizeof(edge_application), &edge_application);
3188    }
3189
3190    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3191        int32_t edgeStrength =
3192            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3193        rc = AddSetParmEntryToBatch(mParameters,
3194                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
3195    }
3196
3197    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3198        int32_t respectFlashMode = 1;
3199        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3200            uint8_t fwk_aeMode =
3201                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3202            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3203                respectFlashMode = 0;
3204                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3205                    __func__);
3206            }
3207        }
3208        if (respectFlashMode) {
3209            uint8_t flashMode =
3210                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3211            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3212                                          sizeof(FLASH_MODES_MAP),
3213                                          flashMode);
3214            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3215            // To check: CAM_INTF_META_FLASH_MODE usage
3216            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3217                          sizeof(flashMode), &flashMode);
3218        }
3219    }
3220
3221    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3222        uint8_t flashPower =
3223            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3224        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3225                sizeof(flashPower), &flashPower);
3226    }
3227
3228    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3229        int64_t flashFiringTime =
3230            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3231        rc = AddSetParmEntryToBatch(mParameters,
3232                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3233    }
3234
3235    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3236        uint8_t geometricMode =
3237            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3238        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3239                sizeof(geometricMode), &geometricMode);
3240    }
3241
3242    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3243        uint8_t geometricStrength =
3244            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3245        rc = AddSetParmEntryToBatch(mParameters,
3246                CAM_INTF_META_GEOMETRIC_STRENGTH,
3247                sizeof(geometricStrength), &geometricStrength);
3248    }
3249
3250    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3251        uint8_t hotPixelMode =
3252            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3253        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3254                sizeof(hotPixelMode), &hotPixelMode);
3255    }
3256
3257    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3258        float lensAperture =
3259            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3260        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3261                sizeof(lensAperture), &lensAperture);
3262    }
3263
3264    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3265        float filterDensity =
3266            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3267        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3268                sizeof(filterDensity), &filterDensity);
3269    }
3270
3271    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3272        float focalLength =
3273            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3274        rc = AddSetParmEntryToBatch(mParameters,
3275                CAM_INTF_META_LENS_FOCAL_LENGTH,
3276                sizeof(focalLength), &focalLength);
3277    }
3278
3279    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3280        uint8_t optStabMode =
3281            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3282        rc = AddSetParmEntryToBatch(mParameters,
3283                CAM_INTF_META_LENS_OPT_STAB_MODE,
3284                sizeof(optStabMode), &optStabMode);
3285    }
3286
3287    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3288        uint8_t noiseRedMode =
3289            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3290        rc = AddSetParmEntryToBatch(mParameters,
3291                CAM_INTF_META_NOISE_REDUCTION_MODE,
3292                sizeof(noiseRedMode), &noiseRedMode);
3293    }
3294
3295    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3296        uint8_t noiseRedStrength =
3297            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3298        rc = AddSetParmEntryToBatch(mParameters,
3299                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3300                sizeof(noiseRedStrength), &noiseRedStrength);
3301    }
3302
3303    cam_crop_region_t scalerCropRegion;
3304    bool scalerCropSet = false;
3305    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3306        scalerCropRegion.left =
3307            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3308        scalerCropRegion.top =
3309            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3310        scalerCropRegion.width =
3311            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3312        scalerCropRegion.height =
3313            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3314        rc = AddSetParmEntryToBatch(mParameters,
3315                CAM_INTF_META_SCALER_CROP_REGION,
3316                sizeof(scalerCropRegion), &scalerCropRegion);
3317        scalerCropSet = true;
3318    }
3319
3320    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3321        int64_t sensorExpTime =
3322            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3323        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3324        rc = AddSetParmEntryToBatch(mParameters,
3325                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3326                sizeof(sensorExpTime), &sensorExpTime);
3327    }
3328
3329    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3330        int64_t sensorFrameDuration =
3331            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3332        int64_t minFrameDuration = getMinFrameDuration(request);
3333        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3334        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3335            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3336        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3337        rc = AddSetParmEntryToBatch(mParameters,
3338                CAM_INTF_META_SENSOR_FRAME_DURATION,
3339                sizeof(sensorFrameDuration), &sensorFrameDuration);
3340    }
3341
3342    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3343        int32_t sensorSensitivity =
3344            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3345        if (sensorSensitivity <
3346                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3347            sensorSensitivity =
3348                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3349        if (sensorSensitivity >
3350                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3351            sensorSensitivity =
3352                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3353        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3354        rc = AddSetParmEntryToBatch(mParameters,
3355                CAM_INTF_META_SENSOR_SENSITIVITY,
3356                sizeof(sensorSensitivity), &sensorSensitivity);
3357    }
3358
3359    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3360        int32_t shadingMode =
3361            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3362        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3363                sizeof(shadingMode), &shadingMode);
3364    }
3365
3366    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3367        uint8_t shadingStrength =
3368            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3369        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3370                sizeof(shadingStrength), &shadingStrength);
3371    }
3372
3373    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3374        uint8_t fwk_facedetectMode =
3375            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3376        uint8_t facedetectMode =
3377            lookupHalName(FACEDETECT_MODES_MAP,
3378                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3379        rc = AddSetParmEntryToBatch(mParameters,
3380                CAM_INTF_META_STATS_FACEDETECT_MODE,
3381                sizeof(facedetectMode), &facedetectMode);
3382    }
3383
3384    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3385        uint8_t histogramMode =
3386            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3387        rc = AddSetParmEntryToBatch(mParameters,
3388                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3389                sizeof(histogramMode), &histogramMode);
3390    }
3391
3392    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3393        uint8_t sharpnessMapMode =
3394            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3395        rc = AddSetParmEntryToBatch(mParameters,
3396                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3397                sizeof(sharpnessMapMode), &sharpnessMapMode);
3398    }
3399
3400    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3401        uint8_t tonemapMode =
3402            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3403        rc = AddSetParmEntryToBatch(mParameters,
3404                CAM_INTF_META_TONEMAP_MODE,
3405                sizeof(tonemapMode), &tonemapMode);
3406    }
3407    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3408    /*All tonemap channels will have the same number of points*/
3409    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3410        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3411        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3412        cam_rgb_tonemap_curves tonemapCurves;
3413        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3414
3415        /* ch0 = G*/
3416        int point = 0;
3417        cam_tonemap_curve_t tonemapCurveGreen;
3418        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3419            for (int j = 0; j < 2; j++) {
3420               tonemapCurveGreen.tonemap_points[i][j] =
3421                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3422               point++;
3423            }
3424        }
3425        tonemapCurves.curves[0] = tonemapCurveGreen;
3426
3427        /* ch 1 = B */
3428        point = 0;
3429        cam_tonemap_curve_t tonemapCurveBlue;
3430        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3431            for (int j = 0; j < 2; j++) {
3432               tonemapCurveBlue.tonemap_points[i][j] =
3433                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3434               point++;
3435            }
3436        }
3437        tonemapCurves.curves[1] = tonemapCurveBlue;
3438
3439        /* ch 2 = R */
3440        point = 0;
3441        cam_tonemap_curve_t tonemapCurveRed;
3442        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3443            for (int j = 0; j < 2; j++) {
3444               tonemapCurveRed.tonemap_points[i][j] =
3445                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3446               point++;
3447            }
3448        }
3449        tonemapCurves.curves[2] = tonemapCurveRed;
3450
3451        rc = AddSetParmEntryToBatch(mParameters,
3452                CAM_INTF_META_TONEMAP_CURVES,
3453                sizeof(tonemapCurves), &tonemapCurves);
3454    }
3455
3456    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3457        uint8_t captureIntent =
3458            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3459        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3460                sizeof(captureIntent), &captureIntent);
3461    }
3462
3463    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3464        uint8_t blackLevelLock =
3465            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3466        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3467                sizeof(blackLevelLock), &blackLevelLock);
3468    }
3469
3470    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3471        uint8_t lensShadingMapMode =
3472            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3473        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3474                sizeof(lensShadingMapMode), &lensShadingMapMode);
3475    }
3476
3477    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3478        cam_area_t roi;
3479        bool reset = true;
3480        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3481        if (scalerCropSet) {
3482            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3483        }
3484        if (reset) {
3485            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3486                    sizeof(roi), &roi);
3487        }
3488    }
3489
3490    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3491        cam_area_t roi;
3492        bool reset = true;
3493        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3494        if (scalerCropSet) {
3495            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3496        }
3497        if (reset) {
3498            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3499                    sizeof(roi), &roi);
3500        }
3501    }
3502
3503    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3504        cam_area_t roi;
3505        bool reset = true;
3506        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3507        if (scalerCropSet) {
3508            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3509        }
3510        if (reset) {
3511            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3512                    sizeof(roi), &roi);
3513        }
3514    }
3515    return rc;
3516}
3517
3518/*===========================================================================
3519 * FUNCTION   : getJpegSettings
3520 *
3521 * DESCRIPTION: save the jpeg settings in the HAL
3522 *
3523 *
3524 * PARAMETERS :
3525 *   @settings  : frame settings information from framework
3526 *
3527 *
 * RETURN     : success: NO_ERROR
 *              failure: non-zero error code
3530 *==========================================================================*/
3531int QCamera3HardwareInterface::getJpegSettings
3532                                  (const camera_metadata_t *settings)
3533{
3534    if (mJpegSettings) {
3535        if (mJpegSettings->gps_timestamp) {
3536            free(mJpegSettings->gps_timestamp);
3537            mJpegSettings->gps_timestamp = NULL;
3538        }
3539        if (mJpegSettings->gps_coordinates) {
3540            for (int i = 0; i < 3; i++) {
3541                free(mJpegSettings->gps_coordinates[i]);
3542                mJpegSettings->gps_coordinates[i] = NULL;
3543            }
3544        }
3545        free(mJpegSettings);
3546        mJpegSettings = NULL;
3547    }
3548    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3549    CameraMetadata jpeg_settings;
3550    jpeg_settings = settings;
3551
3552    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3553        mJpegSettings->jpeg_orientation =
3554            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3555    } else {
3556        mJpegSettings->jpeg_orientation = 0;
3557    }
3558    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3559        mJpegSettings->jpeg_quality =
3560            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3561    } else {
3562        mJpegSettings->jpeg_quality = 85;
3563    }
3564    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3565        mJpegSettings->thumbnail_size.width =
3566            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3567        mJpegSettings->thumbnail_size.height =
3568            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3569    } else {
3570        mJpegSettings->thumbnail_size.width = 0;
3571        mJpegSettings->thumbnail_size.height = 0;
3572    }
3573    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3574        for (int i = 0; i < 3; i++) {
3575            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3576            *(mJpegSettings->gps_coordinates[i]) =
3577                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3578        }
3579    } else{
3580       for (int i = 0; i < 3; i++) {
3581            mJpegSettings->gps_coordinates[i] = NULL;
3582        }
3583    }
3584
3585    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3586        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3587        *(mJpegSettings->gps_timestamp) =
3588            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3589    } else {
3590        mJpegSettings->gps_timestamp = NULL;
3591    }
3592
3593    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3594        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3595        for (int i = 0; i < len; i++) {
3596            mJpegSettings->gps_processing_method[i] =
3597                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3598        }
3599        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3600            mJpegSettings->gps_processing_method[len] = '\0';
3601        }
3602    } else {
3603        mJpegSettings->gps_processing_method[0] = '\0';
3604    }
3605
3606    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3607        mJpegSettings->sensor_sensitivity =
3608            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3609    } else {
3610        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3611    }
3612
3613    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3614
3615    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3616        mJpegSettings->lens_focal_length =
3617            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3618    }
3619    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3620        mJpegSettings->exposure_compensation =
3621            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3622    }
3623    mJpegSettings->sharpness = 10; //default value
3624    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3625        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3626        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3627            mJpegSettings->sharpness = 0;
3628        }
3629    }
3630    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3631    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3632    mJpegSettings->is_jpeg_format = true;
3633    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3634    return 0;
3635}
3636
3637/*===========================================================================
3638 * FUNCTION   : captureResultCb
3639 *
3640 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3641 *
3642 * PARAMETERS :
3643 *   @frame  : frame information from mm-camera-interface
3644 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3645 *   @userdata: userdata
3646 *
3647 * RETURN     : NONE
3648 *==========================================================================*/
3649void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3650                camera3_stream_buffer_t *buffer,
3651                uint32_t frame_number, void *userdata)
3652{
3653    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3654    if (hw == NULL) {
3655        ALOGE("%s: Invalid hw %p", __func__, hw);
3656        return;
3657    }
3658
3659    hw->captureResultCb(metadata, buffer, frame_number);
3660    return;
3661}
3662
3663
3664/*===========================================================================
3665 * FUNCTION   : initialize
3666 *
3667 * DESCRIPTION: Pass framework callback pointers to HAL
3668 *
3669 * PARAMETERS :
3670 *
3671 *
3672 * RETURN     : Success : 0
3673 *              Failure: -ENODEV
3674 *==========================================================================*/
3675
3676int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3677                                  const camera3_callback_ops_t *callback_ops)
3678{
3679    ALOGV("%s: E", __func__);
3680    QCamera3HardwareInterface *hw =
3681        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3682    if (!hw) {
3683        ALOGE("%s: NULL camera device", __func__);
3684        return -ENODEV;
3685    }
3686
3687    int rc = hw->initialize(callback_ops);
3688    ALOGV("%s: X", __func__);
3689    return rc;
3690}
3691
3692/*===========================================================================
3693 * FUNCTION   : configure_streams
3694 *
3695 * DESCRIPTION:
3696 *
3697 * PARAMETERS :
3698 *
3699 *
3700 * RETURN     : Success: 0
3701 *              Failure: -EINVAL (if stream configuration is invalid)
3702 *                       -ENODEV (fatal error)
3703 *==========================================================================*/
3704
3705int QCamera3HardwareInterface::configure_streams(
3706        const struct camera3_device *device,
3707        camera3_stream_configuration_t *stream_list)
3708{
3709    ALOGV("%s: E", __func__);
3710    QCamera3HardwareInterface *hw =
3711        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3712    if (!hw) {
3713        ALOGE("%s: NULL camera device", __func__);
3714        return -ENODEV;
3715    }
3716    int rc = hw->configureStreams(stream_list);
3717    ALOGV("%s: X", __func__);
3718    return rc;
3719}
3720
3721/*===========================================================================
3722 * FUNCTION   : register_stream_buffers
3723 *
3724 * DESCRIPTION: Register stream buffers with the device
3725 *
3726 * PARAMETERS :
3727 *
3728 * RETURN     :
3729 *==========================================================================*/
3730int QCamera3HardwareInterface::register_stream_buffers(
3731        const struct camera3_device *device,
3732        const camera3_stream_buffer_set_t *buffer_set)
3733{
3734    ALOGV("%s: E", __func__);
3735    QCamera3HardwareInterface *hw =
3736        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3737    if (!hw) {
3738        ALOGE("%s: NULL camera device", __func__);
3739        return -ENODEV;
3740    }
3741    int rc = hw->registerStreamBuffers(buffer_set);
3742    ALOGV("%s: X", __func__);
3743    return rc;
3744}
3745
3746/*===========================================================================
3747 * FUNCTION   : construct_default_request_settings
3748 *
3749 * DESCRIPTION: Configure a settings buffer to meet the required use case
3750 *
3751 * PARAMETERS :
3752 *
3753 *
3754 * RETURN     : Success: Return valid metadata
3755 *              Failure: Return NULL
3756 *==========================================================================*/
3757const camera_metadata_t* QCamera3HardwareInterface::
3758    construct_default_request_settings(const struct camera3_device *device,
3759                                        int type)
3760{
3761
3762    ALOGV("%s: E", __func__);
3763    camera_metadata_t* fwk_metadata = NULL;
3764    QCamera3HardwareInterface *hw =
3765        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3766    if (!hw) {
3767        ALOGE("%s: NULL camera device", __func__);
3768        return NULL;
3769    }
3770
3771    fwk_metadata = hw->translateCapabilityToMetadata(type);
3772
3773    ALOGV("%s: X", __func__);
3774    return fwk_metadata;
3775}
3776
3777/*===========================================================================
3778 * FUNCTION   : process_capture_request
3779 *
3780 * DESCRIPTION:
3781 *
3782 * PARAMETERS :
3783 *
3784 *
3785 * RETURN     :
3786 *==========================================================================*/
3787int QCamera3HardwareInterface::process_capture_request(
3788                    const struct camera3_device *device,
3789                    camera3_capture_request_t *request)
3790{
3791    ALOGV("%s: E", __func__);
3792    QCamera3HardwareInterface *hw =
3793        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3794    if (!hw) {
3795        ALOGE("%s: NULL camera device", __func__);
3796        return -EINVAL;
3797    }
3798
3799    int rc = hw->processCaptureRequest(request);
3800    ALOGV("%s: X", __func__);
3801    return rc;
3802}
3803
3804/*===========================================================================
3805 * FUNCTION   : get_metadata_vendor_tag_ops
3806 *
3807 * DESCRIPTION:
3808 *
3809 * PARAMETERS :
3810 *
3811 *
3812 * RETURN     :
3813 *==========================================================================*/
3814
3815void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3816                const struct camera3_device *device,
3817                vendor_tag_query_ops_t* ops)
3818{
3819    ALOGV("%s: E", __func__);
3820    QCamera3HardwareInterface *hw =
3821        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3822    if (!hw) {
3823        ALOGE("%s: NULL camera device", __func__);
3824        return;
3825    }
3826
3827    hw->getMetadataVendorTagOps(ops);
3828    ALOGV("%s: X", __func__);
3829    return;
3830}
3831
3832/*===========================================================================
3833 * FUNCTION   : dump
3834 *
3835 * DESCRIPTION:
3836 *
3837 * PARAMETERS :
3838 *
3839 *
3840 * RETURN     :
3841 *==========================================================================*/
3842
3843void QCamera3HardwareInterface::dump(
3844                const struct camera3_device *device, int fd)
3845{
3846    ALOGV("%s: E", __func__);
3847    QCamera3HardwareInterface *hw =
3848        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3849    if (!hw) {
3850        ALOGE("%s: NULL camera device", __func__);
3851        return;
3852    }
3853
3854    hw->dump(fd);
3855    ALOGV("%s: X", __func__);
3856    return;
3857}
3858
3859/*===========================================================================
3860 * FUNCTION   : flush
3861 *
3862 * DESCRIPTION:
3863 *
3864 * PARAMETERS :
3865 *
3866 *
3867 * RETURN     :
3868 *==========================================================================*/
3869
3870int QCamera3HardwareInterface::flush(
3871                const struct camera3_device *device)
3872{
3873    int rc;
3874    ALOGV("%s: E", __func__);
3875    QCamera3HardwareInterface *hw =
3876        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3877    if (!hw) {
3878        ALOGE("%s: NULL camera device", __func__);
3879        return -EINVAL;
3880    }
3881
3882    rc = hw->flush();
3883    ALOGV("%s: X", __func__);
3884    return rc;
3885}
3886
3887/*===========================================================================
3888 * FUNCTION   : close_camera_device
3889 *
3890 * DESCRIPTION:
3891 *
3892 * PARAMETERS :
3893 *
3894 *
3895 * RETURN     :
3896 *==========================================================================*/
3897int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3898{
3899    ALOGV("%s: E", __func__);
3900    int ret = NO_ERROR;
3901    QCamera3HardwareInterface *hw =
3902        reinterpret_cast<QCamera3HardwareInterface *>(
3903            reinterpret_cast<camera3_device_t *>(device)->priv);
3904    if (!hw) {
3905        ALOGE("NULL camera device");
3906        return BAD_VALUE;
3907    }
3908    delete hw;
3909
3910    pthread_mutex_lock(&mCameraSessionLock);
3911    mCameraSessionActive = 0;
3912    pthread_mutex_unlock(&mCameraSessionLock);
3913    ALOGV("%s: X", __func__);
3914    return ret;
3915}
3916
3917/*===========================================================================
3918 * FUNCTION   : getWaveletDenoiseProcessPlate
3919 *
3920 * DESCRIPTION: query wavelet denoise process plate
3921 *
3922 * PARAMETERS : None
3923 *
3924 * RETURN     : WNR prcocess plate vlaue
3925 *==========================================================================*/
3926cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3927{
3928    char prop[PROPERTY_VALUE_MAX];
3929    memset(prop, 0, sizeof(prop));
3930    property_get("persist.denoise.process.plates", prop, "0");
3931    int processPlate = atoi(prop);
3932    switch(processPlate) {
3933    case 0:
3934        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3935    case 1:
3936        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3937    case 2:
3938        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3939    case 3:
3940        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3941    default:
3942        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3943    }
3944}
3945
3946/*===========================================================================
3947 * FUNCTION   : needRotationReprocess
3948 *
3949 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3950 *
3951 * PARAMETERS : none
3952 *
3953 * RETURN     : true: needed
3954 *              false: no need
3955 *==========================================================================*/
3956bool QCamera3HardwareInterface::needRotationReprocess()
3957{
3958
3959    if (!mJpegSettings->is_jpeg_format) {
3960        // RAW image, no need to reprocess
3961        return false;
3962    }
3963
3964    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3965        mJpegSettings->jpeg_orientation > 0) {
3966        // current rotation is not zero, and pp has the capability to process rotation
3967        ALOGD("%s: need do reprocess for rotation", __func__);
3968        return true;
3969    }
3970
3971    return false;
3972}
3973
3974/*===========================================================================
3975 * FUNCTION   : needReprocess
3976 *
3977 * DESCRIPTION: if reprocess in needed
3978 *
3979 * PARAMETERS : none
3980 *
3981 * RETURN     : true: needed
3982 *              false: no need
3983 *==========================================================================*/
3984bool QCamera3HardwareInterface::needReprocess()
3985{
3986    if (!mJpegSettings->is_jpeg_format) {
3987        // RAW image, no need to reprocess
3988        return false;
3989    }
3990
3991    if ((mJpegSettings->min_required_pp_mask > 0) ||
3992         isWNREnabled()) {
3993        // TODO: add for ZSL HDR later
3994        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3995        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3996        return true;
3997    }
3998    return needRotationReprocess();
3999}
4000
4001/*===========================================================================
4002 * FUNCTION   : addOnlineReprocChannel
4003 *
4004 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4005 *              coming from input channel
4006 *
4007 * PARAMETERS :
4008 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4009 *
4010 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4011 *==========================================================================*/
4012QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4013              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4014{
4015    int32_t rc = NO_ERROR;
4016    QCamera3ReprocessChannel *pChannel = NULL;
4017    if (pInputChannel == NULL) {
4018        ALOGE("%s: input channel obj is NULL", __func__);
4019        return NULL;
4020    }
4021
4022    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4023            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4024    if (NULL == pChannel) {
4025        ALOGE("%s: no mem for reprocess channel", __func__);
4026        return NULL;
4027    }
4028
4029    // Capture channel, only need snapshot and postview streams start together
4030    mm_camera_channel_attr_t attr;
4031    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4032    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4033    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4034    rc = pChannel->initialize();
4035    if (rc != NO_ERROR) {
4036        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4037        delete pChannel;
4038        return NULL;
4039    }
4040
4041    // pp feature config
4042    cam_pp_feature_config_t pp_config;
4043    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4044    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4045        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4046        pp_config.sharpness = mJpegSettings->sharpness;
4047    }
4048
4049    if (isWNREnabled()) {
4050        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4051        pp_config.denoise2d.denoise_enable = 1;
4052        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4053    }
4054    if (needRotationReprocess()) {
4055        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4056        int rotation = mJpegSettings->jpeg_orientation;
4057        if (rotation == 0) {
4058            pp_config.rotation = ROTATE_0;
4059        } else if (rotation == 90) {
4060            pp_config.rotation = ROTATE_90;
4061        } else if (rotation == 180) {
4062            pp_config.rotation = ROTATE_180;
4063        } else if (rotation == 270) {
4064            pp_config.rotation = ROTATE_270;
4065        }
4066    }
4067
4068   rc = pChannel->addReprocStreamsFromSource(pp_config,
4069                                             pInputChannel,
4070                                             mMetadataChannel);
4071
4072    if (rc != NO_ERROR) {
4073        delete pChannel;
4074        return NULL;
4075    }
4076    return pChannel;
4077}
4078
4079int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
4080{
4081    return gCamCapability[mCameraId]->min_num_pp_bufs;
4082}
4083
4084bool QCamera3HardwareInterface::isWNREnabled() {
4085    return gCamCapability[mCameraId]->isWnrSupported;
4086}
4087
4088}; //end namespace qcamera
4089