// QCamera3HWI.cpp revision 4e5fef6d28e89ec2635f92ea344d3947a7bd4422
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
// Simple max helper used for local computations.
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Convenience accessor for the INDEX-th buffer pointer of a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables. NOTE(review): dereferenced without a NULL
// check in the HWI constructor, so these must be populated before an
// interface object is created -- presumably during the capability query.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Previously applied parameter buffer (shared, file scope).
parm_buffer_t *prevSettings;
// Cached static metadata per sensor, handed to the framework.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: only one camera session may be active at a
// time (enforced in openCamera()).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
// Translation tables between framework (ANDROID_*) enum values and the
// corresponding backend (CAM_*) enum values.

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Framework STEADYPHOTO maps to the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// AF_MODE_OFF is expressed as a fixed-focus backend mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode to flash mode: both OFF and plain ON disable the flash; the
// red-eye variant is mapped to plain AUTO flash here.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Only OFF and FULL face detection are supported (no SIMPLE mapping).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Flat list of supported JPEG thumbnail sizes as (width, height) pairs;
// the final (0, 0) pair advertises "no thumbnail".
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};

// Dispatch table wiring the camera3 device ops to the static trampoline
// methods of this class; installed on mCameraDevice.ops in the constructor.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
164QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
165    : mCameraId(cameraId),
166      mCameraHandle(NULL),
167      mCameraOpened(false),
168      mCameraInitialized(false),
169      mCallbackOps(NULL),
170      mInputStream(NULL),
171      mMetadataChannel(NULL),
172      mPictureChannel(NULL),
173      mFirstRequest(false),
174      mParamHeap(NULL),
175      mParameters(NULL),
176      mJpegSettings(NULL),
177      mIsZslMode(false),
178      mMinProcessedFrameDuration(0),
179      mMinJpegFrameDuration(0),
180      mMinRawFrameDuration(0),
181      m_pPowerModule(NULL)
182{
183    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
184    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
185    mCameraDevice.common.close = close_camera_device;
186    mCameraDevice.ops = &mCameraOps;
187    mCameraDevice.priv = this;
188    gCamCapability[cameraId]->version = CAM_HAL_V3;
189    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
190    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
191    gCamCapability[cameraId]->min_num_pp_bufs = 3;
192
193    pthread_cond_init(&mRequestCond, NULL);
194    mPendingRequest = 0;
195    mCurrentRequestId = -1;
196    pthread_mutex_init(&mMutex, NULL);
197
198    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
199        mDefaultMetadata[i] = NULL;
200
201#ifdef HAS_MULTIMEDIA_HINTS
202    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
203        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
204    }
205#endif
206}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if (channel)
240            delete channel;
241        free (*it);
242    }
243
244    mPictureChannel = NULL;
245
246    if (mJpegSettings != NULL) {
247        free(mJpegSettings);
248        mJpegSettings = NULL;
249    }
250
251    /* Clean up all channels */
252    if (mCameraInitialized) {
253        if (mMetadataChannel) {
254            mMetadataChannel->stop();
255            delete mMetadataChannel;
256            mMetadataChannel = NULL;
257        }
258        deinitParameters();
259    }
260
261    if (mCameraOpened)
262        closeCamera();
263
264    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
265        if (mDefaultMetadata[i])
266            free_camera_metadata(mDefaultMetadata[i]);
267
268    pthread_cond_destroy(&mRequestCond);
269
270    pthread_mutex_destroy(&mMutex);
271    ALOGV("%s: X", __func__);
272}
273
274/*===========================================================================
275 * FUNCTION   : openCamera
276 *
277 * DESCRIPTION: open camera
278 *
279 * PARAMETERS :
280 *   @hw_device  : double ptr for camera device struct
281 *
282 * RETURN     : int32_t type of status
283 *              NO_ERROR  -- success
284 *              none-zero failure code
285 *==========================================================================*/
286int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
287{
288    int rc = 0;
289    pthread_mutex_lock(&mCameraSessionLock);
290    if (mCameraSessionActive) {
291        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
292        pthread_mutex_unlock(&mCameraSessionLock);
293        return INVALID_OPERATION;
294    }
295
296    if (mCameraOpened) {
297        *hw_device = NULL;
298        return PERMISSION_DENIED;
299    }
300
301    rc = openCamera();
302    if (rc == 0) {
303        *hw_device = &mCameraDevice.common;
304        mCameraSessionActive = 1;
305    } else
306        *hw_device = NULL;
307
308#ifdef HAS_MULTIMEDIA_HINTS
309    if (rc == 0) {
310        if (m_pPowerModule) {
311            if (m_pPowerModule->powerHint) {
312                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
313                        (void *)"state=1");
314            }
315        }
316    }
317#endif
318    pthread_mutex_unlock(&mCameraSessionLock);
319    return rc;
320}
321
322/*===========================================================================
323 * FUNCTION   : openCamera
324 *
325 * DESCRIPTION: open camera
326 *
327 * PARAMETERS : none
328 *
329 * RETURN     : int32_t type of status
330 *              NO_ERROR  -- success
331 *              none-zero failure code
332 *==========================================================================*/
333int QCamera3HardwareInterface::openCamera()
334{
335    if (mCameraHandle) {
336        ALOGE("Failure: Camera already opened");
337        return ALREADY_EXISTS;
338    }
339    mCameraHandle = camera_open(mCameraId);
340    if (!mCameraHandle) {
341        ALOGE("camera_open failed.");
342        return UNKNOWN_ERROR;
343    }
344
345    mCameraOpened = true;
346
347    return NO_ERROR;
348}
349
350/*===========================================================================
351 * FUNCTION   : closeCamera
352 *
353 * DESCRIPTION: close camera
354 *
355 * PARAMETERS : none
356 *
357 * RETURN     : int32_t type of status
358 *              NO_ERROR  -- success
359 *              none-zero failure code
360 *==========================================================================*/
361int QCamera3HardwareInterface::closeCamera()
362{
363    int rc = NO_ERROR;
364
365    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
366    mCameraHandle = NULL;
367    mCameraOpened = false;
368
369#ifdef HAS_MULTIMEDIA_HINTS
370    if (rc == NO_ERROR) {
371        if (m_pPowerModule) {
372            if (m_pPowerModule->powerHint) {
373                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
374                        (void *)"state=0");
375            }
376        }
377    }
378#endif
379
380    return rc;
381}
382
383/*===========================================================================
384 * FUNCTION   : initialize
385 *
386 * DESCRIPTION: Initialize frameworks callback functions
387 *
388 * PARAMETERS :
389 *   @callback_ops : callback function to frameworks
390 *
391 * RETURN     :
392 *
393 *==========================================================================*/
394int QCamera3HardwareInterface::initialize(
395        const struct camera3_callback_ops *callback_ops)
396{
397    int rc;
398
399    pthread_mutex_lock(&mMutex);
400
401    rc = initParameters();
402    if (rc < 0) {
403        ALOGE("%s: initParamters failed %d", __func__, rc);
404       goto err1;
405    }
406    mCallbackOps = callback_ops;
407
408    pthread_mutex_unlock(&mMutex);
409    mCameraInitialized = true;
410    return 0;
411
412err1:
413    pthread_mutex_unlock(&mMutex);
414    return rc;
415}
416
417/*===========================================================================
418 * FUNCTION   : configureStreams
419 *
420 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
421 *              and output streams.
422 *
423 * PARAMETERS :
424 *   @stream_list : streams to be configured
425 *
426 * RETURN     :
427 *
428 *==========================================================================*/
429int QCamera3HardwareInterface::configureStreams(
430        camera3_stream_configuration_t *streamList)
431{
432    int rc = 0;
433    mIsZslMode = false;
434
435    // Sanity check stream_list
436    if (streamList == NULL) {
437        ALOGE("%s: NULL stream configuration", __func__);
438        return BAD_VALUE;
439    }
440    if (streamList->streams == NULL) {
441        ALOGE("%s: NULL stream list", __func__);
442        return BAD_VALUE;
443    }
444
445    if (streamList->num_streams < 1) {
446        ALOGE("%s: Bad number of streams requested: %d", __func__,
447                streamList->num_streams);
448        return BAD_VALUE;
449    }
450
451    /* first invalidate all the steams in the mStreamList
452     * if they appear again, they will be validated */
453    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
454            it != mStreamInfo.end(); it++) {
455        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
456        channel->stop();
457        (*it)->status = INVALID;
458    }
459    if (mMetadataChannel) {
460        /* If content of mStreamInfo is not 0, there is metadata stream */
461        mMetadataChannel->stop();
462    }
463
464    pthread_mutex_lock(&mMutex);
465
466    camera3_stream_t *inputStream = NULL;
467    camera3_stream_t *jpegStream = NULL;
468    cam_stream_size_info_t stream_config_info;
469
470    for (size_t i = 0; i < streamList->num_streams; i++) {
471        camera3_stream_t *newStream = streamList->streams[i];
472        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
473                __func__, newStream->stream_type, newStream->format,
474                 newStream->width, newStream->height);
475        //if the stream is in the mStreamList validate it
476        bool stream_exists = false;
477        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
478                it != mStreamInfo.end(); it++) {
479            if ((*it)->stream == newStream) {
480                QCamera3Channel *channel =
481                    (QCamera3Channel*)(*it)->stream->priv;
482                stream_exists = true;
483                (*it)->status = RECONFIGURE;
484                /*delete the channel object associated with the stream because
485                  we need to reconfigure*/
486                delete channel;
487                (*it)->stream->priv = NULL;
488            }
489        }
490        if (!stream_exists) {
491            //new stream
492            stream_info_t* stream_info;
493            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
494            stream_info->stream = newStream;
495            stream_info->status = VALID;
496            stream_info->registered = 0;
497            mStreamInfo.push_back(stream_info);
498        }
499        if (newStream->stream_type == CAMERA3_STREAM_INPUT
500                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
501            if (inputStream != NULL) {
502                ALOGE("%s: Multiple input streams requested!", __func__);
503                pthread_mutex_unlock(&mMutex);
504                return BAD_VALUE;
505            }
506            inputStream = newStream;
507        }
508        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
509            jpegStream = newStream;
510        }
511    }
512    mInputStream = inputStream;
513
514    /*clean up invalid streams*/
515    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
516            it != mStreamInfo.end();) {
517        if(((*it)->status) == INVALID){
518            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
519            delete channel;
520            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
521            free(*it);
522            it = mStreamInfo.erase(it);
523        } else {
524            it++;
525        }
526    }
527    if (mMetadataChannel) {
528        delete mMetadataChannel;
529        mMetadataChannel = NULL;
530    }
531
532    //Create metadata channel and initialize it
533    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
534                    mCameraHandle->ops, captureResultCb,
535                    &gCamCapability[mCameraId]->padding_info, this);
536    if (mMetadataChannel == NULL) {
537        ALOGE("%s: failed to allocate metadata channel", __func__);
538        rc = -ENOMEM;
539        pthread_mutex_unlock(&mMutex);
540        return rc;
541    }
542    rc = mMetadataChannel->initialize();
543    if (rc < 0) {
544        ALOGE("%s: metadata channel initialization failed", __func__);
545        delete mMetadataChannel;
546        pthread_mutex_unlock(&mMutex);
547        return rc;
548    }
549
550    /* Allocate channel objects for the requested streams */
551    for (size_t i = 0; i < streamList->num_streams; i++) {
552        camera3_stream_t *newStream = streamList->streams[i];
553        uint32_t stream_usage = newStream->usage;
554        stream_config_info.stream_sizes[i].width = newStream->width;
555        stream_config_info.stream_sizes[i].height = newStream->height;
556        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
557            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
558            //for zsl stream the size is jpeg size
559            stream_config_info.stream_sizes[i].width = jpegStream->width;
560            stream_config_info.stream_sizes[i].height = jpegStream->height;
561            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
562        } else {
563           //for non zsl streams find out the format
564           switch (newStream->format) {
565           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
566              {
567                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
568                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
569                 } else {
570                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
571                 }
572              }
573              break;
574           case HAL_PIXEL_FORMAT_YCbCr_420_888:
575              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
576              break;
577           case HAL_PIXEL_FORMAT_BLOB:
578              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
579              break;
580           default:
581              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
582              break;
583           }
584        }
585        if (newStream->priv == NULL) {
586            //New stream, construct channel
587            switch (newStream->stream_type) {
588            case CAMERA3_STREAM_INPUT:
589                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
590                break;
591            case CAMERA3_STREAM_BIDIRECTIONAL:
592                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
593                    GRALLOC_USAGE_HW_CAMERA_WRITE;
594                break;
595            case CAMERA3_STREAM_OUTPUT:
596                /* For video encoding stream, set read/write rarely
597                 * flag so that they may be set to un-cached */
598                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
599                    newStream->usage =
600                         (GRALLOC_USAGE_SW_READ_RARELY |
601                         GRALLOC_USAGE_SW_WRITE_RARELY |
602                         GRALLOC_USAGE_HW_CAMERA_WRITE);
603                else
604                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
605                break;
606            default:
607                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
608                break;
609            }
610
611            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
612                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
613                QCamera3Channel *channel;
614                switch (newStream->format) {
615                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
616                case HAL_PIXEL_FORMAT_YCbCr_420_888:
617                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
618                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
619                        jpegStream) {
620                        uint32_t width = jpegStream->width;
621                        uint32_t height = jpegStream->height;
622                        mIsZslMode = true;
623                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
624                            mCameraHandle->ops, captureResultCb,
625                            &gCamCapability[mCameraId]->padding_info, this, newStream,
626                            width, height);
627                    } else
628                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
629                            mCameraHandle->ops, captureResultCb,
630                            &gCamCapability[mCameraId]->padding_info, this, newStream);
631                    if (channel == NULL) {
632                        ALOGE("%s: allocation of channel failed", __func__);
633                        pthread_mutex_unlock(&mMutex);
634                        return -ENOMEM;
635                    }
636
637                    newStream->priv = channel;
638                    break;
639                case HAL_PIXEL_FORMAT_BLOB:
640                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
641                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
642                            mCameraHandle->ops, captureResultCb,
643                            &gCamCapability[mCameraId]->padding_info, this, newStream);
644                    if (mPictureChannel == NULL) {
645                        ALOGE("%s: allocation of channel failed", __func__);
646                        pthread_mutex_unlock(&mMutex);
647                        return -ENOMEM;
648                    }
649                    newStream->priv = (QCamera3Channel*)mPictureChannel;
650                    break;
651
652                //TODO: Add support for app consumed format?
653                default:
654                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
655                    break;
656                }
657            }
658        } else {
659            // Channel already exists for this stream
660            // Do nothing for now
661        }
662    }
663    /*For the streams to be reconfigured we need to register the buffers
664      since the framework wont*/
665    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
666            it != mStreamInfo.end(); it++) {
667        if ((*it)->status == RECONFIGURE) {
668            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
669            /*only register buffers for streams that have already been
670              registered*/
671            if ((*it)->registered) {
672                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
673                        (*it)->buffer_set.buffers);
674                if (rc != NO_ERROR) {
675                    ALOGE("%s: Failed to register the buffers of old stream,\
676                            rc = %d", __func__, rc);
677                }
678                ALOGV("%s: channel %p has %d buffers",
679                        __func__, channel, (*it)->buffer_set.num_buffers);
680            }
681        }
682
683        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
684        if (index == NAME_NOT_FOUND) {
685            mPendingBuffersMap.add((*it)->stream, 0);
686        } else {
687            mPendingBuffersMap.editValueAt(index) = 0;
688        }
689    }
690
691    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
692    mPendingRequestsList.clear();
693
694    /*flush the metadata list*/
695    if (!mStoredMetadataList.empty()) {
696        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
697              m != mStoredMetadataList.end(); m++) {
698            mMetadataChannel->bufDone(m->meta_buf);
699            free(m->meta_buf);
700            m = mStoredMetadataList.erase(m);
701        }
702    }
703    int32_t hal_version = CAM_HAL_V3;
704    stream_config_info.num_streams = streamList->num_streams;
705
706    //settings/parameters don't carry over for new configureStreams
707    memset(mParameters, 0, sizeof(parm_buffer_t));
708
709    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
710    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
711                sizeof(hal_version), &hal_version);
712
713    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
714                sizeof(stream_config_info), &stream_config_info);
715
716    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
717
718    mFirstRequest = true;
719
720    //Get min frame duration for this streams configuration
721    deriveMinFrameDuration();
722
723    pthread_mutex_unlock(&mMutex);
724    return rc;
725}
726
727/*===========================================================================
728 * FUNCTION   : validateCaptureRequest
729 *
730 * DESCRIPTION: validate a capture request from camera service
731 *
732 * PARAMETERS :
733 *   @request : request from framework to process
734 *
735 * RETURN     :
736 *
737 *==========================================================================*/
738int QCamera3HardwareInterface::validateCaptureRequest(
739                    camera3_capture_request_t *request)
740{
741    ssize_t idx = 0;
742    const camera3_stream_buffer_t *b;
743    CameraMetadata meta;
744
745    /* Sanity check the request */
746    if (request == NULL) {
747        ALOGE("%s: NULL capture request", __func__);
748        return BAD_VALUE;
749    }
750
751    uint32_t frameNumber = request->frame_number;
752    if (request->input_buffer != NULL &&
753            request->input_buffer->stream != mInputStream) {
754        ALOGE("%s: Request %d: Input buffer not from input stream!",
755                __FUNCTION__, frameNumber);
756        return BAD_VALUE;
757    }
758    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
759        ALOGE("%s: Request %d: No output buffers provided!",
760                __FUNCTION__, frameNumber);
761        return BAD_VALUE;
762    }
763    if (request->input_buffer != NULL) {
764        b = request->input_buffer;
765        QCamera3Channel *channel =
766            static_cast<QCamera3Channel*>(b->stream->priv);
767        if (channel == NULL) {
768            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
769                    __func__, frameNumber, idx);
770            return BAD_VALUE;
771        }
772        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
773            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
774                    __func__, frameNumber, idx);
775            return BAD_VALUE;
776        }
777        if (b->release_fence != -1) {
778            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
779                    __func__, frameNumber, idx);
780            return BAD_VALUE;
781        }
782        if (b->buffer == NULL) {
783            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
784                    __func__, frameNumber, idx);
785            return BAD_VALUE;
786        }
787    }
788
789    // Validate all buffers
790    b = request->output_buffers;
791    do {
792        QCamera3Channel *channel =
793                static_cast<QCamera3Channel*>(b->stream->priv);
794        if (channel == NULL) {
795            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
796                    __func__, frameNumber, idx);
797            return BAD_VALUE;
798        }
799        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
800            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
801                    __func__, frameNumber, idx);
802            return BAD_VALUE;
803        }
804        if (b->release_fence != -1) {
805            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
806                    __func__, frameNumber, idx);
807            return BAD_VALUE;
808        }
809        if (b->buffer == NULL) {
810            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
811                    __func__, frameNumber, idx);
812            return BAD_VALUE;
813        }
814        idx++;
815        b = request->output_buffers + idx;
816    } while (idx < (ssize_t)request->num_output_buffers);
817
818    return NO_ERROR;
819}
820
821/*===========================================================================
822 * FUNCTION   : deriveMinFrameDuration
823 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
825 *              on currently configured streams.
826 *
827 * PARAMETERS : NONE
828 *
829 * RETURN     : NONE
830 *
831 *==========================================================================*/
832void QCamera3HardwareInterface::deriveMinFrameDuration()
833{
834    int32_t maxJpegDimension, maxProcessedDimension;
835
836    maxJpegDimension = 0;
837    maxProcessedDimension = 0;
838
839    // Figure out maximum jpeg, processed, and raw dimensions
840    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
841        it != mStreamInfo.end(); it++) {
842
843        // Input stream doesn't have valid stream_type
844        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
845            continue;
846
847        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
848        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
849            if (dimension > maxJpegDimension)
850                maxJpegDimension = dimension;
851        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
852            if (dimension > maxProcessedDimension)
853                maxProcessedDimension = dimension;
854        }
855    }
856
857    //Assume all jpeg dimensions are in processed dimensions.
858    if (maxJpegDimension > maxProcessedDimension)
859        maxProcessedDimension = maxJpegDimension;
860
861    //Find minimum durations for processed, jpeg, and raw
862    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
863    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
864        if (maxProcessedDimension ==
865            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
866            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
867            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
868            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
869            break;
870        }
871    }
872}
873
874/*===========================================================================
875 * FUNCTION   : getMinFrameDuration
876 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
883 *
884 *==========================================================================*/
885int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
886{
887    bool hasJpegStream = false;
888    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
889        const camera3_stream_t *stream = request->output_buffers[i].stream;
890        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
891            hasJpegStream = true;
892    }
893
894    if (!hasJpegStream)
895        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
896    else
897        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
898}
899
900/*===========================================================================
901 * FUNCTION   : registerStreamBuffers
902 *
903 * DESCRIPTION: Register buffers for a given stream with the HAL device.
904 *
905 * PARAMETERS :
 *   @buffer_set : stream buffers to be registered with the HAL device
907 *
908 * RETURN     :
909 *
910 *==========================================================================*/
911int QCamera3HardwareInterface::registerStreamBuffers(
912        const camera3_stream_buffer_set_t *buffer_set)
913{
914    int rc = 0;
915
916    pthread_mutex_lock(&mMutex);
917
918    if (buffer_set == NULL) {
919        ALOGE("%s: Invalid buffer_set parameter.", __func__);
920        pthread_mutex_unlock(&mMutex);
921        return -EINVAL;
922    }
923    if (buffer_set->stream == NULL) {
924        ALOGE("%s: Invalid stream parameter.", __func__);
925        pthread_mutex_unlock(&mMutex);
926        return -EINVAL;
927    }
928    if (buffer_set->num_buffers < 1) {
929        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
930        pthread_mutex_unlock(&mMutex);
931        return -EINVAL;
932    }
933    if (buffer_set->buffers == NULL) {
934        ALOGE("%s: Invalid buffers parameter.", __func__);
935        pthread_mutex_unlock(&mMutex);
936        return -EINVAL;
937    }
938
939    camera3_stream_t *stream = buffer_set->stream;
940    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
941
942    //set the buffer_set in the mStreamInfo array
943    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
944            it != mStreamInfo.end(); it++) {
945        if ((*it)->stream == stream) {
946            uint32_t numBuffers = buffer_set->num_buffers;
947            (*it)->buffer_set.stream = buffer_set->stream;
948            (*it)->buffer_set.num_buffers = numBuffers;
949            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
950            if ((*it)->buffer_set.buffers == NULL) {
951                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
952                pthread_mutex_unlock(&mMutex);
953                return -ENOMEM;
954            }
955            for (size_t j = 0; j < numBuffers; j++){
956                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
957            }
958            (*it)->registered = 1;
959        }
960    }
961    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
962    if (rc < 0) {
963        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
964        pthread_mutex_unlock(&mMutex);
965        return -ENODEV;
966    }
967
968    pthread_mutex_unlock(&mMutex);
969    return NO_ERROR;
970}
971
972/*===========================================================================
973 * FUNCTION   : processCaptureRequest
974 *
975 * DESCRIPTION: process a capture request from camera service
976 *
977 * PARAMETERS :
978 *   @request : request from framework to process
979 *
980 * RETURN     :
981 *
982 *==========================================================================*/
983int QCamera3HardwareInterface::processCaptureRequest(
984                    camera3_capture_request_t *request)
985{
986    int rc = NO_ERROR;
987    int32_t request_id;
988    CameraMetadata meta;
989    MetadataBufferInfo reproc_meta;
990    int queueMetadata = 0;
991
992    pthread_mutex_lock(&mMutex);
993
994    rc = validateCaptureRequest(request);
995    if (rc != NO_ERROR) {
996        ALOGE("%s: incoming request is not valid", __func__);
997        pthread_mutex_unlock(&mMutex);
998        return rc;
999    }
1000
1001    meta = request->settings;
1002
1003    // For first capture request, send capture intent, and
1004    // stream on all streams
1005    if (mFirstRequest) {
1006
1007        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1008            int32_t hal_version = CAM_HAL_V3;
1009            uint8_t captureIntent =
1010                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1011
1012            memset(mParameters, 0, sizeof(parm_buffer_t));
1013            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1014            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1015                sizeof(hal_version), &hal_version);
1016            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1017                sizeof(captureIntent), &captureIntent);
1018            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1019                mParameters);
1020        }
1021
1022        mMetadataChannel->start();
1023        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1024            it != mStreamInfo.end(); it++) {
1025            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1026            channel->start();
1027        }
1028    }
1029
1030    uint32_t frameNumber = request->frame_number;
1031    uint32_t streamTypeMask = 0;
1032
1033    if (meta.exists(ANDROID_REQUEST_ID)) {
1034        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1035        mCurrentRequestId = request_id;
1036        ALOGV("%s: Received request with id: %d",__func__, request_id);
1037    } else if (mFirstRequest || mCurrentRequestId == -1){
1038        ALOGE("%s: Unable to find request id field, \
1039                & no previous id available", __func__);
1040        return NAME_NOT_FOUND;
1041    } else {
1042        ALOGV("%s: Re-using old request id", __func__);
1043        request_id = mCurrentRequestId;
1044    }
1045
1046    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1047                                    __func__, __LINE__,
1048                                    request->num_output_buffers,
1049                                    request->input_buffer,
1050                                    frameNumber);
1051    // Acquire all request buffers first
1052    int blob_request = 0;
1053    for (size_t i = 0; i < request->num_output_buffers; i++) {
1054        const camera3_stream_buffer_t& output = request->output_buffers[i];
1055        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1056        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1057
1058        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1059        //Call function to store local copy of jpeg data for encode params.
1060            blob_request = 1;
1061            rc = getJpegSettings(request->settings);
1062            if (rc < 0) {
1063                ALOGE("%s: failed to get jpeg parameters", __func__);
1064                pthread_mutex_unlock(&mMutex);
1065                return rc;
1066            }
1067        }
1068
1069        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1070        if (rc != OK) {
1071            ALOGE("%s: fence wait failed %d", __func__, rc);
1072            pthread_mutex_unlock(&mMutex);
1073            return rc;
1074        }
1075        streamTypeMask |= channel->getStreamTypeMask();
1076    }
1077
1078    rc = setFrameParameters(request, streamTypeMask);
1079    if (rc < 0) {
1080        ALOGE("%s: fail to set frame parameters", __func__);
1081        pthread_mutex_unlock(&mMutex);
1082        return rc;
1083    }
1084
1085    /* Update pending request list and pending buffers map */
1086    PendingRequestInfo pendingRequest;
1087    pendingRequest.frame_number = frameNumber;
1088    pendingRequest.num_buffers = request->num_output_buffers;
1089    pendingRequest.request_id = request_id;
1090    pendingRequest.blob_request = blob_request;
1091    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1092
1093    for (size_t i = 0; i < request->num_output_buffers; i++) {
1094        RequestedBufferInfo requestedBuf;
1095        requestedBuf.stream = request->output_buffers[i].stream;
1096        requestedBuf.buffer = NULL;
1097        pendingRequest.buffers.push_back(requestedBuf);
1098
1099        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1100    }
1101    mPendingRequestsList.push_back(pendingRequest);
1102
1103    // Notify metadata channel we receive a request
1104    mMetadataChannel->request(NULL, frameNumber);
1105
1106    // Call request on other streams
1107    for (size_t i = 0; i < request->num_output_buffers; i++) {
1108        const camera3_stream_buffer_t& output = request->output_buffers[i];
1109        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1110        mm_camera_buf_def_t *pInputBuffer = NULL;
1111
1112        if (channel == NULL) {
1113            ALOGE("%s: invalid channel pointer for stream", __func__);
1114            continue;
1115        }
1116
1117        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1118            QCamera3RegularChannel* inputChannel = NULL;
1119            if(request->input_buffer != NULL){
1120                //Try to get the internal format
1121                inputChannel = (QCamera3RegularChannel*)
1122                    request->input_buffer->stream->priv;
1123                if(inputChannel == NULL ){
1124                    ALOGE("%s: failed to get input channel handle", __func__);
1125                } else {
1126                    pInputBuffer =
1127                        inputChannel->getInternalFormatBuffer(
1128                                request->input_buffer->buffer);
1129                    ALOGD("%s: Input buffer dump",__func__);
1130                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1131                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1132                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1133                    ALOGD("Handle:%p", request->input_buffer->buffer);
1134                    //TODO: need to get corresponding metadata and send it to pproc
1135                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1136                         m != mStoredMetadataList.end(); m++) {
1137                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1138                            reproc_meta.meta_buf = m->meta_buf;
1139                            queueMetadata = 1;
1140                            break;
1141                        }
1142                    }
1143                }
1144            }
1145            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1146                            pInputBuffer,(QCamera3Channel*)inputChannel);
1147            if (queueMetadata) {
1148                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1149            }
1150        } else {
1151            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1152                __LINE__, output.buffer, frameNumber);
1153            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1154                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1155                     m != mStoredMetadataList.end(); m++) {
1156                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1157                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1158                            mMetadataChannel->bufDone(m->meta_buf);
1159                            free(m->meta_buf);
1160                            m = mStoredMetadataList.erase(m);
1161                            break;
1162                        }
1163                   }
1164                }
1165            }
1166            rc = channel->request(output.buffer, frameNumber);
1167        }
1168        if (rc < 0)
1169            ALOGE("%s: request failed", __func__);
1170    }
1171
1172    mFirstRequest = false;
1173    // Added a timed condition wait
1174    struct timespec ts;
1175    uint8_t isValidTimeout = 1;
1176    rc = clock_gettime(CLOCK_REALTIME, &ts);
1177    if (rc < 0) {
1178        isValidTimeout = 0;
1179        ALOGE("%s: Error reading the real time clock!!", __func__);
1180    }
1181    else {
1182        // Make timeout as 5 sec for request to be honored
1183        ts.tv_sec += 5;
1184    }
1185    //Block on conditional variable
1186    mPendingRequest = 1;
1187    while (mPendingRequest == 1) {
1188        if (!isValidTimeout) {
1189            ALOGV("%s: Blocking on conditional wait", __func__);
1190            pthread_cond_wait(&mRequestCond, &mMutex);
1191        }
1192        else {
1193            ALOGV("%s: Blocking on timed conditional wait", __func__);
1194            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1195            if (rc == ETIMEDOUT) {
1196                rc = -ENODEV;
1197                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1198                break;
1199            }
1200        }
1201        ALOGV("%s: Unblocked", __func__);
1202    }
1203
1204    pthread_mutex_unlock(&mMutex);
1205    return rc;
1206}
1207
1208/*===========================================================================
1209 * FUNCTION   : getMetadataVendorTagOps
1210 *
1211 * DESCRIPTION:
1212 *
1213 * PARAMETERS :
1214 *
1215 *
1216 * RETURN     :
1217 *==========================================================================*/
1218void QCamera3HardwareInterface::getMetadataVendorTagOps(
1219                    vendor_tag_query_ops_t* /*ops*/)
1220{
1221    /* Enable locks when we eventually add Vendor Tags */
1222    /*
1223    pthread_mutex_lock(&mMutex);
1224
1225    pthread_mutex_unlock(&mMutex);
1226    */
1227    return;
1228}
1229
1230/*===========================================================================
1231 * FUNCTION   : dump
1232 *
1233 * DESCRIPTION:
1234 *
1235 * PARAMETERS :
1236 *
1237 *
1238 * RETURN     :
1239 *==========================================================================*/
1240void QCamera3HardwareInterface::dump(int /*fd*/)
1241{
1242    /*Enable lock when we implement this function*/
1243    /*
1244    pthread_mutex_lock(&mMutex);
1245
1246    pthread_mutex_unlock(&mMutex);
1247    */
1248    return;
1249}
1250
1251/*===========================================================================
1252 * FUNCTION   : flush
1253 *
1254 * DESCRIPTION:
1255 *
1256 * PARAMETERS :
1257 *
1258 *
1259 * RETURN     :
1260 *==========================================================================*/
1261int QCamera3HardwareInterface::flush()
1262{
1263    /*Enable lock when we implement this function*/
1264    /*
1265    pthread_mutex_lock(&mMutex);
1266
1267    pthread_mutex_unlock(&mMutex);
1268    */
1269    return 0;
1270}
1271
1272/*===========================================================================
1273 * FUNCTION   : captureResultCb
1274 *
1275 * DESCRIPTION: Callback handler for all capture result
1276 *              (streams, as well as metadata)
1277 *
1278 * PARAMETERS :
1279 *   @metadata : metadata information
1280 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1281 *               NULL if metadata.
1282 *
1283 * RETURN     : NONE
1284 *==========================================================================*/
1285void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1286                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1287{
1288    pthread_mutex_lock(&mMutex);
1289
1290    if (metadata_buf) {
1291        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1292        int32_t frame_number_valid = *(int32_t *)
1293            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1294        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1295            CAM_INTF_META_PENDING_REQUESTS, metadata);
1296        uint32_t frame_number = *(uint32_t *)
1297            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1298        const struct timeval *tv = (const struct timeval *)
1299            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1300        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1301            tv->tv_usec * NSEC_PER_USEC;
1302
1303        if (!frame_number_valid) {
1304            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1305            mMetadataChannel->bufDone(metadata_buf);
1306            goto done_metadata;
1307        }
1308        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1309                frame_number, capture_time);
1310
1311        // Go through the pending requests info and send shutter/results to frameworks
1312        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1313                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1314            camera3_capture_result_t result;
1315            camera3_notify_msg_t notify_msg;
1316            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1317
1318            // Flush out all entries with less or equal frame numbers.
1319
1320            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1321            //Right now it's the same as metadata timestamp
1322
1323            //TODO: When there is metadata drop, how do we derive the timestamp of
1324            //dropped frames? For now, we fake the dropped timestamp by substracting
1325            //from the reported timestamp
1326            nsecs_t current_capture_time = capture_time -
1327                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1328
1329            // Send shutter notify to frameworks
1330            notify_msg.type = CAMERA3_MSG_SHUTTER;
1331            notify_msg.message.shutter.frame_number = i->frame_number;
1332            notify_msg.message.shutter.timestamp = current_capture_time;
1333            mCallbackOps->notify(mCallbackOps, &notify_msg);
1334            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1335                    i->frame_number, capture_time);
1336
1337            // Send empty metadata with already filled buffers for dropped metadata
1338            // and send valid metadata with already filled buffers for current metadata
1339            if (i->frame_number < frame_number) {
1340                CameraMetadata dummyMetadata;
1341                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1342                        &current_capture_time, 1);
1343                dummyMetadata.update(ANDROID_REQUEST_ID,
1344                        &(i->request_id), 1);
1345                result.result = dummyMetadata.release();
1346            } else {
1347                result.result = translateCbMetadataToResultMetadata(metadata,
1348                        current_capture_time, i->request_id);
1349                if (mIsZslMode) {
1350                   int found_metadata = 0;
1351                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1352                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1353                        j != i->buffers.end(); j++) {
1354                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1355                         //check if corresp. zsl already exists in the stored metadata list
1356                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1357                               m != mStoredMetadataList.begin(); m++) {
1358                            if (m->frame_number == frame_number) {
1359                               m->meta_buf = metadata_buf;
1360                               found_metadata = 1;
1361                               break;
1362                            }
1363                         }
1364                         if (!found_metadata) {
1365                            MetadataBufferInfo store_meta_info;
1366                            store_meta_info.meta_buf = metadata_buf;
1367                            store_meta_info.frame_number = frame_number;
1368                            mStoredMetadataList.push_back(store_meta_info);
1369                            found_metadata = 1;
1370                         }
1371                      }
1372                   }
1373                   if (!found_metadata) {
1374                       if (!i->input_buffer_present && i->blob_request) {
1375                          //livesnapshot or fallback non-zsl snapshot case
1376                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1377                                j != i->buffers.end(); j++){
1378                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1379                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1380                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1381                                 break;
1382                              }
1383                         }
1384                       } else {
1385                            //return the metadata immediately
1386                            mMetadataChannel->bufDone(metadata_buf);
1387                            free(metadata_buf);
1388                       }
1389                   }
1390               } else if (!mIsZslMode && i->blob_request) {
1391                   //If it is a blob request then send the metadata to the picture channel
1392                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1393               } else {
1394                   // Return metadata buffer
1395                   mMetadataChannel->bufDone(metadata_buf);
1396                   free(metadata_buf);
1397               }
1398
1399            }
1400            if (!result.result) {
1401                ALOGE("%s: metadata is NULL", __func__);
1402            }
1403            result.frame_number = i->frame_number;
1404            result.num_output_buffers = 0;
1405            result.output_buffers = NULL;
1406            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1407                    j != i->buffers.end(); j++) {
1408                if (j->buffer) {
1409                    result.num_output_buffers++;
1410                }
1411            }
1412
1413            if (result.num_output_buffers > 0) {
1414                camera3_stream_buffer_t *result_buffers =
1415                    new camera3_stream_buffer_t[result.num_output_buffers];
1416                if (!result_buffers) {
1417                    ALOGE("%s: Fatal error: out of memory", __func__);
1418                }
1419                size_t result_buffers_idx = 0;
1420                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1421                        j != i->buffers.end(); j++) {
1422                    if (j->buffer) {
1423                        result_buffers[result_buffers_idx++] = *(j->buffer);
1424                        free(j->buffer);
1425                        j->buffer = NULL;
1426                        mPendingBuffersMap.editValueFor(j->stream)--;
1427                    }
1428                }
1429                result.output_buffers = result_buffers;
1430
1431                mCallbackOps->process_capture_result(mCallbackOps, &result);
1432                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1433                        __func__, result.frame_number, current_capture_time);
1434                free_camera_metadata((camera_metadata_t *)result.result);
1435                delete[] result_buffers;
1436            } else {
1437                mCallbackOps->process_capture_result(mCallbackOps, &result);
1438                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1439                        __func__, result.frame_number, current_capture_time);
1440                free_camera_metadata((camera_metadata_t *)result.result);
1441            }
1442            // erase the element from the list
1443            i = mPendingRequestsList.erase(i);
1444        }
1445
1446
1447done_metadata:
1448        bool max_buffers_dequeued = false;
1449        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1450            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1451            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1452            if (queued_buffers == stream->max_buffers) {
1453                max_buffers_dequeued = true;
1454                break;
1455            }
1456        }
1457        if (!max_buffers_dequeued && !pending_requests) {
1458            // Unblock process_capture_request
1459            mPendingRequest = 0;
1460            pthread_cond_signal(&mRequestCond);
1461        }
1462    } else {
1463        // If the frame number doesn't exist in the pending request list,
1464        // directly send the buffer to the frameworks, and update pending buffers map
1465        // Otherwise, book-keep the buffer.
1466        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1467        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1468            i++;
1469        }
1470        if (i == mPendingRequestsList.end()) {
1471            // Verify all pending requests frame_numbers are greater
1472            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1473                    j != mPendingRequestsList.end(); j++) {
1474                if (j->frame_number < frame_number) {
1475                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1476                            __func__, j->frame_number, frame_number);
1477                }
1478            }
1479            camera3_capture_result_t result;
1480            result.result = NULL;
1481            result.frame_number = frame_number;
1482            result.num_output_buffers = 1;
1483            result.output_buffers = buffer;
1484            ALOGV("%s: result frame_number = %d, buffer = %p",
1485                    __func__, frame_number, buffer);
1486            mPendingBuffersMap.editValueFor(buffer->stream)--;
1487            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1488                int found = 0;
1489                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1490                      k != mStoredMetadataList.end(); k++) {
1491                    if (k->frame_number == frame_number) {
1492                        k->zsl_buf_hdl = buffer->buffer;
1493                        found = 1;
1494                        break;
1495                    }
1496                }
1497                if (!found) {
1498                   MetadataBufferInfo meta_info;
1499                   meta_info.frame_number = frame_number;
1500                   meta_info.zsl_buf_hdl = buffer->buffer;
1501                   mStoredMetadataList.push_back(meta_info);
1502                }
1503            }
1504            mCallbackOps->process_capture_result(mCallbackOps, &result);
1505        } else {
1506            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1507                    j != i->buffers.end(); j++) {
1508                if (j->stream == buffer->stream) {
1509                    if (j->buffer != NULL) {
1510                        ALOGE("%s: Error: buffer is already set", __func__);
1511                    } else {
1512                        j->buffer = (camera3_stream_buffer_t *)malloc(
1513                                sizeof(camera3_stream_buffer_t));
1514                        *(j->buffer) = *buffer;
1515                        ALOGV("%s: cache buffer %p at result frame_number %d",
1516                                __func__, buffer, frame_number);
1517                    }
1518                }
1519            }
1520        }
1521    }
1522    pthread_mutex_unlock(&mMutex);
1523    return;
1524}
1525
1526/*===========================================================================
1527 * FUNCTION   : translateCbMetadataToResultMetadata
1528 *
 * DESCRIPTION: translate metadata reported by the camera backend into the
 *              camera_metadata_t result format expected by the framework
 *
 * PARAMETERS :
 *   @metadata   : metadata information from callback
 *   @timestamp  : sensor timestamp (ns) to report in the result
 *   @request_id : framework request id to report in the result
1533 *
1534 * RETURN     : camera_metadata_t*
1535 *              metadata in a format specified by fwk
1536 *==========================================================================*/
1537camera_metadata_t*
1538QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1539                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1540                                 int32_t request_id)
1541{
1542    CameraMetadata camMetadata;
1543    camera_metadata_t* resultMetadata;
1544
1545    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1546    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1547
1548    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1549    uint8_t next_entry;
1550    while (curr_entry != CAM_INTF_PARM_MAX) {
1551       ALOGV("%s: META_DEBUG: cur_entry is %d", __func__, curr_entry);
1552       switch (curr_entry) {
1553         case CAM_INTF_META_FACE_DETECTION:{
1554             cam_face_detection_data_t *faceDetectionInfo =
1555                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1556             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1557             int32_t faceIds[numFaces];
1558             uint8_t faceScores[numFaces];
1559             int32_t faceRectangles[numFaces * 4];
1560             int32_t faceLandmarks[numFaces * 6];
1561             int j = 0, k = 0;
1562             for (int i = 0; i < numFaces; i++) {
1563                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1564                 faceScores[i] = faceDetectionInfo->faces[i].score;
1565                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1566                         faceRectangles+j, -1);
1567                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1568                 j+= 4;
1569                 k+= 6;
1570             }
1571             if (numFaces > 0) {
1572                 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1573                 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1574                 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1575                     faceRectangles, numFaces*4);
1576                 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1577                     faceLandmarks, numFaces*6);
1578             }
1579            break;
1580            }
1581         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1582             uint8_t  *color_correct_mode =
1583                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1584             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1585             break;
1586          }
1587         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1588             int32_t  *ae_precapture_id =
1589                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1590             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1591             break;
1592          }
1593         case CAM_INTF_META_AEC_ROI: {
1594            cam_area_t  *hAeRegions =
1595                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1596             int32_t aeRegions[5];
1597             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1598             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1599             break;
1600          }
1601          case CAM_INTF_META_AEC_STATE:{
1602             uint8_t *ae_state =
1603                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1604             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1605             break;
1606          }
1607          case CAM_INTF_PARM_FOCUS_MODE:{
1608             uint8_t  *focusMode =
1609                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1610             camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1611             break;
1612          }
1613          case CAM_INTF_META_AF_ROI:{
1614             /*af regions*/
1615             cam_area_t  *hAfRegions =
1616                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1617             int32_t afRegions[5];
1618             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1619             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1620             break;
1621          }
1622          case CAM_INTF_META_AF_STATE: {
1623             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1624             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1625             break;
1626          }
1627          case CAM_INTF_META_AF_TRIGGER_ID: {
1628             int32_t  *afTriggerId =
1629                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1630             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1631             break;
1632          }
1633          case CAM_INTF_PARM_WHITE_BALANCE: {
1634               uint8_t  *whiteBalance =
1635                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1636               camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1637               break;
1638          }
1639          case CAM_INTF_META_AWB_REGIONS: {
1640             /*awb regions*/
1641             cam_area_t  *hAwbRegions =
1642                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1643             int32_t awbRegions[5];
1644             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1645             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1646             break;
1647          }
1648          case CAM_INTF_META_AWB_STATE: {
1649             uint8_t  *whiteBalanceState =
1650                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1651             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1652             break;
1653          }
1654          case CAM_INTF_META_MODE: {
1655             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1656             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1657             break;
1658          }
1659          case CAM_INTF_META_EDGE_MODE: {
1660             uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1661             camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1662             break;
1663          }
1664          case CAM_INTF_META_FLASH_POWER: {
1665             uint8_t  *flashPower =
1666                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1667             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1668             break;
1669          }
1670          case CAM_INTF_META_FLASH_FIRING_TIME: {
1671             int64_t  *flashFiringTime =
1672                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1673             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1674             break;
1675          }
1676          case CAM_INTF_META_FLASH_STATE: {
1677             uint8_t  *flashState =
1678                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1679             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1680             break;
1681          }
1682          case CAM_INTF_META_HOTPIXEL_MODE: {
1683              uint8_t  *hotPixelMode =
1684                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1685              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1686              break;
1687          }
1688          case CAM_INTF_META_LENS_APERTURE:{
1689             float  *lensAperture =
1690                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1691             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1692             break;
1693          }
1694          case CAM_INTF_META_LENS_FILTERDENSITY: {
1695             float  *filterDensity =
1696                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1697             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1698             break;
1699          }
1700          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1701             float  *focalLength =
1702                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1703             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1704             break;
1705          }
1706          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1707             float  *focusDistance =
1708                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1709             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1710             break;
1711          }
1712          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1713             float  *focusRange =
1714                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1715             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1716          }
1717          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1718             uint8_t  *opticalStab =
1719                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1720             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1721          }
1722          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1723             uint8_t  *noiseRedMode =
1724                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1725             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1726             break;
1727          }
1728          case CAM_INTF_META_SCALER_CROP_REGION: {
1729             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1730             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1731             int32_t scalerCropRegion[4];
1732             scalerCropRegion[0] = hScalerCropRegion->left;
1733             scalerCropRegion[1] = hScalerCropRegion->top;
1734             scalerCropRegion[2] = hScalerCropRegion->width;
1735             scalerCropRegion[3] = hScalerCropRegion->height;
1736             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1737             break;
1738          }
1739          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1740             int64_t  *sensorExpTime =
1741                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1742             mMetadataResponse.exposure_time = *sensorExpTime;
1743             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1744             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1745             break;
1746          }
1747          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1748             int64_t  *sensorFameDuration =
1749                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1750             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1751             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1752             break;
1753          }
1754          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1755             int32_t  *sensorSensitivity =
1756                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1757             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1758             mMetadataResponse.iso_speed = *sensorSensitivity;
1759             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1760             break;
1761          }
1762          case CAM_INTF_META_SHADING_MODE: {
1763             uint8_t  *shadingMode =
1764                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1765             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1766             break;
1767          }
1768          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1769             uint8_t  *faceDetectMode =
1770                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1771             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1772                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1773                                                        *faceDetectMode);
1774             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1775             break;
1776          }
1777          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1778             uint8_t  *histogramMode =
1779                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1780             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1781             break;
1782          }
1783          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1784               uint8_t  *sharpnessMapMode =
1785                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1786               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1787                                  sharpnessMapMode, 1);
1788               break;
1789           }
1790          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1791               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1792               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1793               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1794                                  (int32_t*)sharpnessMap->sharpness,
1795                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1796               break;
1797          }
1798          case CAM_INTF_META_LENS_SHADING_MAP: {
1799               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1800               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1801               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1802               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1803               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1804                                  (float*)lensShadingMap->lens_shading,
1805                                  4*map_width*map_height);
1806               break;
1807          }
1808          case CAM_INTF_META_TONEMAP_CURVES:{
1809             //Populate CAM_INTF_META_TONEMAP_CURVES
1810             /* ch0 = G, ch 1 = B, ch 2 = R*/
1811             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1812             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1813             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1814                                (float*)tonemap->curves[0].tonemap_points,
1815                                tonemap->tonemap_points_cnt * 2);
1816
1817             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1818                                (float*)tonemap->curves[1].tonemap_points,
1819                                tonemap->tonemap_points_cnt * 2);
1820
1821             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1822                                (float*)tonemap->curves[2].tonemap_points,
1823                                tonemap->tonemap_points_cnt * 2);
1824             break;
1825          }
1826          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1827             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1828             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1829             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1830             break;
1831          }
1832          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1833              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1834              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1835              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1836                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1837              break;
1838          }
1839          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1840             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1841             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1842             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1843                       predColorCorrectionGains->gains, 4);
1844             break;
1845          }
1846          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1847             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1848                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1849             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1850                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1851             break;
1852
1853          }
1854          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
1855             uint8_t *blackLevelLock = (uint8_t*)
1856               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1857             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1858             break;
1859          }
1860          case CAM_INTF_META_SCENE_FLICKER:{
1861             uint8_t *sceneFlicker = (uint8_t*)
1862             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1863             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1864             break;
1865          }
1866          case CAM_INTF_PARM_LED_MODE:
1867             break;
1868          default:
1869             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
1870                   __func__, curr_entry);
1871             break;
1872       }
1873       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
1874       curr_entry = next_entry;
1875    }
1876    resultMetadata = camMetadata.release();
1877    return resultMetadata;
1878}
1879
1880/*===========================================================================
1881 * FUNCTION   : convertToRegions
1882 *
1883 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1884 *
1885 * PARAMETERS :
1886 *   @rect   : cam_rect_t struct to convert
1887 *   @region : int32_t destination array
1888 *   @weight : if we are converting from cam_area_t, weight is valid
1889 *             else weight = -1
1890 *
1891 *==========================================================================*/
1892void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1893    region[0] = rect.left;
1894    region[1] = rect.top;
1895    region[2] = rect.left + rect.width;
1896    region[3] = rect.top + rect.height;
1897    if (weight > -1) {
1898        region[4] = weight;
1899    }
1900}
1901
1902/*===========================================================================
1903 * FUNCTION   : convertFromRegions
1904 *
1905 * DESCRIPTION: helper method to convert from array to cam_rect_t
1906 *
1907 * PARAMETERS :
1908 *   @rect   : cam_rect_t struct to convert
1909 *   @region : int32_t destination array
1910 *   @weight : if we are converting from cam_area_t, weight is valid
1911 *             else weight = -1
1912 *
1913 *==========================================================================*/
1914void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1915                                                   const camera_metadata_t *settings,
1916                                                   uint32_t tag){
1917    CameraMetadata frame_settings;
1918    frame_settings = settings;
1919    int32_t x_min = frame_settings.find(tag).data.i32[0];
1920    int32_t y_min = frame_settings.find(tag).data.i32[1];
1921    int32_t x_max = frame_settings.find(tag).data.i32[2];
1922    int32_t y_max = frame_settings.find(tag).data.i32[3];
1923    roi->weight = frame_settings.find(tag).data.i32[4];
1924    roi->rect.left = x_min;
1925    roi->rect.top = y_min;
1926    roi->rect.width = x_max - x_min;
1927    roi->rect.height = y_max - y_min;
1928}
1929
1930/*===========================================================================
1931 * FUNCTION   : resetIfNeededROI
1932 *
1933 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1934 *              crop region
1935 *
1936 * PARAMETERS :
1937 *   @roi       : cam_area_t struct to resize
1938 *   @scalerCropRegion : cam_crop_region_t region to compare against
1939 *
1940 *
1941 *==========================================================================*/
1942bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1943                                                 const cam_crop_region_t* scalerCropRegion)
1944{
1945    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1946    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1947    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1948    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1949    if ((roi_x_max < scalerCropRegion->left) ||
1950        (roi_y_max < scalerCropRegion->top)  ||
1951        (roi->rect.left > crop_x_max) ||
1952        (roi->rect.top > crop_y_max)){
1953        return false;
1954    }
1955    if (roi->rect.left < scalerCropRegion->left) {
1956        roi->rect.left = scalerCropRegion->left;
1957    }
1958    if (roi->rect.top < scalerCropRegion->top) {
1959        roi->rect.top = scalerCropRegion->top;
1960    }
1961    if (roi_x_max > crop_x_max) {
1962        roi_x_max = crop_x_max;
1963    }
1964    if (roi_y_max > crop_y_max) {
1965        roi_y_max = crop_y_max;
1966    }
1967    roi->rect.width = roi_x_max - roi->rect.left;
1968    roi->rect.height = roi_y_max - roi->rect.top;
1969    return true;
1970}
1971
1972/*===========================================================================
1973 * FUNCTION   : convertLandmarks
1974 *
1975 * DESCRIPTION: helper method to extract the landmarks from face detection info
1976 *
1977 * PARAMETERS :
 *   @face   : cam_face_detection_info_t struct to read landmarks from
1979 *   @landmarks : int32_t destination array
1980 *
1981 *
1982 *==========================================================================*/
1983void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1984{
1985    landmarks[0] = face.left_eye_center.x;
1986    landmarks[1] = face.left_eye_center.y;
1987    landmarks[2] = face.right_eye_center.y;
1988    landmarks[3] = face.right_eye_center.y;
1989    landmarks[4] = face.mouth_center.x;
1990    landmarks[5] = face.mouth_center.y;
1991}
1992
1993#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1994/*===========================================================================
1995 * FUNCTION   : initCapabilities
1996 *
1997 * DESCRIPTION: initialize camera capabilities in static data struct
1998 *
1999 * PARAMETERS :
2000 *   @cameraId  : camera Id
2001 *
2002 * RETURN     : int32_t type of status
2003 *              NO_ERROR  -- success
2004 *              none-zero failure code
2005 *==========================================================================*/
2006int QCamera3HardwareInterface::initCapabilities(int cameraId)
2007{
2008    int rc = 0;
2009    mm_camera_vtbl_t *cameraHandle = NULL;
2010    QCamera3HeapMemory *capabilityHeap = NULL;
2011
2012    cameraHandle = camera_open(cameraId);
2013    if (!cameraHandle) {
2014        ALOGE("%s: camera_open failed", __func__);
2015        rc = -1;
2016        goto open_failed;
2017    }
2018
2019    capabilityHeap = new QCamera3HeapMemory();
2020    if (capabilityHeap == NULL) {
2021        ALOGE("%s: creation of capabilityHeap failed", __func__);
2022        goto heap_creation_failed;
2023    }
2024    /* Allocate memory for capability buffer */
2025    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2026    if(rc != OK) {
2027        ALOGE("%s: No memory for cappability", __func__);
2028        goto allocate_failed;
2029    }
2030
2031    /* Map memory for capability buffer */
2032    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2033    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2034                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2035                                capabilityHeap->getFd(0),
2036                                sizeof(cam_capability_t));
2037    if(rc < 0) {
2038        ALOGE("%s: failed to map capability buffer", __func__);
2039        goto map_failed;
2040    }
2041
2042    /* Query Capability */
2043    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2044    if(rc < 0) {
2045        ALOGE("%s: failed to query capability",__func__);
2046        goto query_failed;
2047    }
2048    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2049    if (!gCamCapability[cameraId]) {
2050        ALOGE("%s: out of memory", __func__);
2051        goto query_failed;
2052    }
2053    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2054                                        sizeof(cam_capability_t));
2055    rc = 0;
2056
2057query_failed:
2058    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2059                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2060map_failed:
2061    capabilityHeap->deallocate();
2062allocate_failed:
2063    delete capabilityHeap;
2064heap_creation_failed:
2065    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2066    cameraHandle = NULL;
2067open_failed:
2068    return rc;
2069}
2070
2071/*===========================================================================
2072 * FUNCTION   : initParameters
2073 *
2074 * DESCRIPTION: initialize camera parameters
2075 *
2076 * PARAMETERS :
2077 *
2078 * RETURN     : int32_t type of status
2079 *              NO_ERROR  -- success
2080 *              none-zero failure code
2081 *==========================================================================*/
2082int QCamera3HardwareInterface::initParameters()
2083{
2084    int rc = 0;
2085
2086    //Allocate Set Param Buffer
2087    mParamHeap = new QCamera3HeapMemory();
2088    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2089    if(rc != OK) {
2090        rc = NO_MEMORY;
2091        ALOGE("Failed to allocate SETPARM Heap memory");
2092        delete mParamHeap;
2093        mParamHeap = NULL;
2094        return rc;
2095    }
2096
2097    //Map memory for parameters buffer
2098    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2099            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2100            mParamHeap->getFd(0),
2101            sizeof(parm_buffer_t));
2102    if(rc < 0) {
2103        ALOGE("%s:failed to map SETPARM buffer",__func__);
2104        rc = FAILED_TRANSACTION;
2105        mParamHeap->deallocate();
2106        delete mParamHeap;
2107        mParamHeap = NULL;
2108        return rc;
2109    }
2110
2111    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2112    return rc;
2113}
2114
2115/*===========================================================================
2116 * FUNCTION   : deinitParameters
2117 *
2118 * DESCRIPTION: de-initialize camera parameters
2119 *
2120 * PARAMETERS :
2121 *
2122 * RETURN     : NONE
2123 *==========================================================================*/
2124void QCamera3HardwareInterface::deinitParameters()
2125{
2126    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2127            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2128
2129    mParamHeap->deallocate();
2130    delete mParamHeap;
2131    mParamHeap = NULL;
2132
2133    mParameters = NULL;
2134}
2135
2136/*===========================================================================
2137 * FUNCTION   : calcMaxJpegSize
2138 *
2139 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2140 *
2141 * PARAMETERS :
2142 *
2143 * RETURN     : max_jpeg_size
2144 *==========================================================================*/
2145int QCamera3HardwareInterface::calcMaxJpegSize()
2146{
2147    int32_t max_jpeg_size = 0;
2148    int temp_width, temp_height;
2149    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2150        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2151        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2152        if (temp_width * temp_height > max_jpeg_size ) {
2153            max_jpeg_size = temp_width * temp_height;
2154        }
2155    }
2156    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2157    return max_jpeg_size;
2158}
2159
2160/*===========================================================================
2161 * FUNCTION   : initStaticMetadata
2162 *
2163 * DESCRIPTION: initialize the static metadata
2164 *
2165 * PARAMETERS :
2166 *   @cameraId  : camera Id
2167 *
2168 * RETURN     : int32_t type of status
2169 *              0  -- success
2170 *              non-zero failure code
2171 *==========================================================================*/
2172int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2173{
2174    int rc = 0;
2175    CameraMetadata staticInfo;
2176
2177    /* android.info: hardware level */
2178    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2179    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2180        &supportedHardwareLevel, 1);
2181
2182    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2183    /*HAL 3 only*/
2184    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2185                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2186
2187    /*hard coded for now but this should come from sensor*/
2188    float min_focus_distance;
2189    if(facingBack){
2190        min_focus_distance = 10;
2191    } else {
2192        min_focus_distance = 0;
2193    }
2194    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2195                    &min_focus_distance, 1);
2196
2197    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2198                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2199
2200    /*should be using focal lengths but sensor doesn't provide that info now*/
2201    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2202                      &gCamCapability[cameraId]->focal_length,
2203                      1);
2204
2205    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2206                      gCamCapability[cameraId]->apertures,
2207                      gCamCapability[cameraId]->apertures_count);
2208
2209    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2210                gCamCapability[cameraId]->filter_densities,
2211                gCamCapability[cameraId]->filter_densities_count);
2212
2213
2214    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2215                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2216                      gCamCapability[cameraId]->optical_stab_modes_count);
2217
2218    staticInfo.update(ANDROID_LENS_POSITION,
2219                      gCamCapability[cameraId]->lens_position,
2220                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2221
2222    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2223                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2224    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2225                      lens_shading_map_size,
2226                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2227
2228    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2229                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2230    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2231            geo_correction_map_size,
2232            sizeof(geo_correction_map_size)/sizeof(int32_t));
2233
2234    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2235                       gCamCapability[cameraId]->geo_correction_map,
2236                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2237
2238    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2239            gCamCapability[cameraId]->sensor_physical_size, 2);
2240
2241    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2242            gCamCapability[cameraId]->exposure_time_range, 2);
2243
2244    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2245            &gCamCapability[cameraId]->max_frame_duration, 1);
2246
2247
2248    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2249                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2250
2251    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2252                                               gCamCapability[cameraId]->pixel_array_size.height};
2253    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2254                      pixel_array_size, 2);
2255
2256    int32_t active_array_size[] = {0, 0,
2257                                                gCamCapability[cameraId]->active_array_size.width,
2258                                                gCamCapability[cameraId]->active_array_size.height};
2259    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2260                      active_array_size, 4);
2261
2262    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2263            &gCamCapability[cameraId]->white_level, 1);
2264
2265    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2266            gCamCapability[cameraId]->black_level_pattern, 4);
2267
2268    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2269                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2270
2271    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2272                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2273
2274    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2275                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2276
2277    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2278                      &gCamCapability[cameraId]->histogram_size, 1);
2279
2280    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2281            &gCamCapability[cameraId]->max_histogram_count, 1);
2282
2283    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2284                                                gCamCapability[cameraId]->sharpness_map_size.height};
2285
2286    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2287            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2288
2289    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2290            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2291
2292
2293    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2294                      &gCamCapability[cameraId]->raw_min_duration,
2295                       1);
2296
2297    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2298                                                HAL_PIXEL_FORMAT_BLOB};
2299    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2300    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2301                      scalar_formats,
2302                      scalar_formats_count);
2303
2304    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2305    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2306              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2307              available_processed_sizes);
2308    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2309                available_processed_sizes,
2310                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2311
2312    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2313                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2314                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2315
2316    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2317    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2318                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2319                 available_fps_ranges);
2320    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2321            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2322
2323    camera_metadata_rational exposureCompensationStep = {
2324            gCamCapability[cameraId]->exp_compensation_step.numerator,
2325            gCamCapability[cameraId]->exp_compensation_step.denominator};
2326    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2327                      &exposureCompensationStep, 1);
2328
2329    /*TO DO*/
2330    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2331    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2332                      availableVstabModes, sizeof(availableVstabModes));
2333
2334    /*HAL 1 and HAL 3 common*/
2335    float maxZoom = 4;
2336    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2337            &maxZoom, 1);
2338
2339    int32_t max3aRegions = 1;
2340    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2341            &max3aRegions, 1);
2342
2343    uint8_t availableFaceDetectModes[] = {
2344            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2345            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2346    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2347                      availableFaceDetectModes,
2348                      sizeof(availableFaceDetectModes));
2349
2350    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2351                                       gCamCapability[cameraId]->raw_dim.height};
2352    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2353                      raw_size,
2354                      sizeof(raw_size)/sizeof(uint32_t));
2355
2356    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2357                                                        gCamCapability[cameraId]->exposure_compensation_max};
2358    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2359            exposureCompensationRange,
2360            sizeof(exposureCompensationRange)/sizeof(int32_t));
2361
2362    uint8_t lensFacing = (facingBack) ?
2363            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2364    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2365
2366    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2367                available_processed_sizes,
2368                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2369
2370    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2371                      available_thumbnail_sizes,
2372                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2373
2374    int32_t max_jpeg_size = 0;
2375    int temp_width, temp_height;
2376    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2377        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2378        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2379        if (temp_width * temp_height > max_jpeg_size ) {
2380            max_jpeg_size = temp_width * temp_height;
2381        }
2382    }
2383    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2384    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2385                      &max_jpeg_size, 1);
2386
2387    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2388    int32_t size = 0;
2389    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2390        int val = lookupFwkName(EFFECT_MODES_MAP,
2391                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2392                                   gCamCapability[cameraId]->supported_effects[i]);
2393        if (val != NAME_NOT_FOUND) {
2394            avail_effects[size] = (uint8_t)val;
2395            size++;
2396        }
2397    }
2398    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2399                      avail_effects,
2400                      size);
2401
2402    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2403    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2404    int32_t supported_scene_modes_cnt = 0;
2405    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2406        int val = lookupFwkName(SCENE_MODES_MAP,
2407                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2408                                gCamCapability[cameraId]->supported_scene_modes[i]);
2409        if (val != NAME_NOT_FOUND) {
2410            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2411            supported_indexes[supported_scene_modes_cnt] = i;
2412            supported_scene_modes_cnt++;
2413        }
2414    }
2415
2416    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2417                      avail_scene_modes,
2418                      supported_scene_modes_cnt);
2419
2420    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2421    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2422                      supported_scene_modes_cnt,
2423                      scene_mode_overrides,
2424                      supported_indexes,
2425                      cameraId);
2426    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2427                      scene_mode_overrides,
2428                      supported_scene_modes_cnt*3);
2429
2430    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2431    size = 0;
2432    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2433        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2434                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2435                                 gCamCapability[cameraId]->supported_antibandings[i]);
2436        if (val != NAME_NOT_FOUND) {
2437            avail_antibanding_modes[size] = (uint8_t)val;
2438            size++;
2439        }
2440
2441    }
2442    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2443                      avail_antibanding_modes,
2444                      size);
2445
2446    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2447    size = 0;
2448    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2449        int val = lookupFwkName(FOCUS_MODES_MAP,
2450                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2451                                gCamCapability[cameraId]->supported_focus_modes[i]);
2452        if (val != NAME_NOT_FOUND) {
2453            avail_af_modes[size] = (uint8_t)val;
2454            size++;
2455        }
2456    }
2457    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2458                      avail_af_modes,
2459                      size);
2460
2461    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2462    size = 0;
2463    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2464        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2465                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2466                                    gCamCapability[cameraId]->supported_white_balances[i]);
2467        if (val != NAME_NOT_FOUND) {
2468            avail_awb_modes[size] = (uint8_t)val;
2469            size++;
2470        }
2471    }
2472    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2473                      avail_awb_modes,
2474                      size);
2475
2476    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2477    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2478      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2479
2480    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2481            available_flash_levels,
2482            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2483
2484
2485    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2486    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2487            &flashAvailable, 1);
2488
2489    uint8_t avail_ae_modes[5];
2490    size = 0;
2491    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2492        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2493        size++;
2494    }
2495    if (flashAvailable) {
2496        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2497        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2498        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2499    }
2500    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2501                      avail_ae_modes,
2502                      size);
2503
2504    int32_t sensitivity_range[2];
2505    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2506    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2507    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2508                      sensitivity_range,
2509                      sizeof(sensitivity_range) / sizeof(int32_t));
2510
2511    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2512                      &gCamCapability[cameraId]->max_analog_sensitivity,
2513                      1);
2514
2515    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2516                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2517                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2518
2519    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2520    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2521                      &sensor_orientation,
2522                      1);
2523
2524    int32_t max_output_streams[3] = {1, 3, 1};
2525    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2526                      max_output_streams,
2527                      3);
2528
2529    gStaticMetadata[cameraId] = staticInfo.release();
2530    return rc;
2531}
2532
2533/*===========================================================================
2534 * FUNCTION   : makeTable
2535 *
2536 * DESCRIPTION: make a table of sizes
2537 *
2538 * PARAMETERS :
2539 *
2540 *
2541 *==========================================================================*/
2542void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2543                                          int32_t* sizeTable)
2544{
2545    int j = 0;
2546    for (int i = 0; i < size; i++) {
2547        sizeTable[j] = dimTable[i].width;
2548        sizeTable[j+1] = dimTable[i].height;
2549        j+=2;
2550    }
2551}
2552
2553/*===========================================================================
2554 * FUNCTION   : makeFPSTable
2555 *
2556 * DESCRIPTION: make a table of fps ranges
2557 *
2558 * PARAMETERS :
2559 *
2560 *==========================================================================*/
2561void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2562                                          int32_t* fpsRangesTable)
2563{
2564    int j = 0;
2565    for (int i = 0; i < size; i++) {
2566        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2567        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2568        j+=2;
2569    }
2570}
2571
2572/*===========================================================================
2573 * FUNCTION   : makeOverridesList
2574 *
2575 * DESCRIPTION: make a list of scene mode overrides
2576 *
2577 * PARAMETERS :
2578 *
2579 *
2580 *==========================================================================*/
2581void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2582                                                  uint8_t size, uint8_t* overridesList,
2583                                                  uint8_t* supported_indexes,
2584                                                  int camera_id)
2585{
2586    /*daemon will give a list of overrides for all scene modes.
2587      However we should send the fwk only the overrides for the scene modes
2588      supported by the framework*/
2589    int j = 0, index = 0, supt = 0;
2590    uint8_t focus_override;
2591    for (int i = 0; i < size; i++) {
2592        supt = 0;
2593        index = supported_indexes[i];
2594        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2595        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2596                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2597                                                    overridesTable[index].awb_mode);
2598        focus_override = (uint8_t)overridesTable[index].af_mode;
2599        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2600           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2601              supt = 1;
2602              break;
2603           }
2604        }
2605        if (supt) {
2606           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2607                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2608                                              focus_override);
2609        } else {
2610           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2611        }
2612        j+=3;
2613    }
2614}
2615
2616/*===========================================================================
2617 * FUNCTION   : getPreviewHalPixelFormat
2618 *
2619 * DESCRIPTION: convert the format to type recognized by framework
2620 *
2621 * PARAMETERS : format : the format from backend
2622 *
2623 ** RETURN    : format recognized by framework
2624 *
2625 *==========================================================================*/
2626int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2627{
2628    int32_t halPixelFormat;
2629
2630    switch (format) {
2631    case CAM_FORMAT_YUV_420_NV12:
2632        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2633        break;
2634    case CAM_FORMAT_YUV_420_NV21:
2635        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2636        break;
2637    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2638        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2639        break;
2640    case CAM_FORMAT_YUV_420_YV12:
2641        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2642        break;
2643    case CAM_FORMAT_YUV_422_NV16:
2644    case CAM_FORMAT_YUV_422_NV61:
2645    default:
2646        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2647        break;
2648    }
2649    return halPixelFormat;
2650}
2651
2652/*===========================================================================
2653 * FUNCTION   : getSensorSensitivity
2654 *
2655 * DESCRIPTION: convert iso_mode to an integer value
2656 *
2657 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2658 *
2659 ** RETURN    : sensitivity supported by sensor
2660 *
2661 *==========================================================================*/
2662int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2663{
2664    int32_t sensitivity;
2665
2666    switch (iso_mode) {
2667    case CAM_ISO_MODE_100:
2668        sensitivity = 100;
2669        break;
2670    case CAM_ISO_MODE_200:
2671        sensitivity = 200;
2672        break;
2673    case CAM_ISO_MODE_400:
2674        sensitivity = 400;
2675        break;
2676    case CAM_ISO_MODE_800:
2677        sensitivity = 800;
2678        break;
2679    case CAM_ISO_MODE_1600:
2680        sensitivity = 1600;
2681        break;
2682    default:
2683        sensitivity = -1;
2684        break;
2685    }
2686    return sensitivity;
2687}
2688
2689
2690/*===========================================================================
2691 * FUNCTION   : AddSetParmEntryToBatch
2692 *
2693 * DESCRIPTION: add set parameter entry into batch
2694 *
2695 * PARAMETERS :
2696 *   @p_table     : ptr to parameter buffer
2697 *   @paramType   : parameter type
2698 *   @paramLength : length of parameter value
2699 *   @paramValue  : ptr to parameter value
2700 *
2701 * RETURN     : int32_t type of status
2702 *              NO_ERROR  -- success
2703 *              none-zero failure code
2704 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    // The batch buffer is an array of fixed-size parameter slots threaded
    // together as a singly linked list sorted by parameter id; the
    // GET/SET_*_PARAM_ID macros read and write those links.  This inserts
    // (or overwrites) the slot for paramType and keeps the list sorted.
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
        // Slot is already the list head; links are untouched and the
        // payload copy below simply overwrites the value in place.
    } else if (position < current){
        // New slot sorts before the current head: link it in front and
        // make it the new first entry.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        // Walk until 'current' is the last node whose id is below ours.
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            // Splice the new slot between 'current' and its successor.
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Reject payloads that would overrun the fixed-size slot.
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2746
2747/*===========================================================================
2748 * FUNCTION   : lookupFwkName
2749 *
2750 * DESCRIPTION: In case the enum is not same in fwk and backend
2751 *              make sure the parameter is correctly propogated
2752 *
2753 * PARAMETERS  :
2754 *   @arr      : map between the two enums
2755 *   @len      : len of the map
2756 *   @hal_name : name of the hal_parm to map
2757 *
2758 * RETURN     : int type of status
2759 *              fwk_name  -- success
2760 *              none-zero failure code
2761 *==========================================================================*/
2762int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2763                                             int len, int hal_name)
2764{
2765
2766    for (int i = 0; i < len; i++) {
2767        if (arr[i].hal_name == hal_name)
2768            return arr[i].fwk_name;
2769    }
2770
2771    /* Not able to find matching framework type is not necessarily
2772     * an error case. This happens when mm-camera supports more attributes
2773     * than the frameworks do */
2774    ALOGD("%s: Cannot find matching framework type", __func__);
2775    return NAME_NOT_FOUND;
2776}
2777
2778/*===========================================================================
2779 * FUNCTION   : lookupHalName
2780 *
2781 * DESCRIPTION: In case the enum is not same in fwk and backend
2782 *              make sure the parameter is correctly propogated
2783 *
2784 * PARAMETERS  :
2785 *   @arr      : map between the two enums
2786 *   @len      : len of the map
2787 *   @fwk_name : name of the hal_parm to map
2788 *
2789 * RETURN     : int32_t type of status
2790 *              hal_name  -- success
2791 *              none-zero failure code
2792 *==========================================================================*/
2793int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2794                                             int len, int fwk_name)
2795{
2796    for (int i = 0; i < len; i++) {
2797       if (arr[i].fwk_name == fwk_name)
2798           return arr[i].hal_name;
2799    }
2800    ALOGE("%s: Cannot find matching hal type", __func__);
2801    return NAME_NOT_FOUND;
2802}
2803
2804/*===========================================================================
2805 * FUNCTION   : getCapabilities
2806 *
2807 * DESCRIPTION: query camera capabilities
2808 *
2809 * PARAMETERS :
2810 *   @cameraId  : camera Id
2811 *   @info      : camera info struct to be filled in with camera capabilities
2812 *
2813 * RETURN     : int32_t type of status
2814 *              NO_ERROR  -- success
2815 *              none-zero failure code
2816 *==========================================================================*/
2817int QCamera3HardwareInterface::getCamInfo(int cameraId,
2818                                    struct camera_info *info)
2819{
2820    int rc = 0;
2821
2822    if (NULL == gCamCapability[cameraId]) {
2823        rc = initCapabilities(cameraId);
2824        if (rc < 0) {
2825            //pthread_mutex_unlock(&g_camlock);
2826            return rc;
2827        }
2828    }
2829
2830    if (NULL == gStaticMetadata[cameraId]) {
2831        rc = initStaticMetadata(cameraId);
2832        if (rc < 0) {
2833            return rc;
2834        }
2835    }
2836
2837    switch(gCamCapability[cameraId]->position) {
2838    case CAM_POSITION_BACK:
2839        info->facing = CAMERA_FACING_BACK;
2840        break;
2841
2842    case CAM_POSITION_FRONT:
2843        info->facing = CAMERA_FACING_FRONT;
2844        break;
2845
2846    default:
2847        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2848        rc = -1;
2849        break;
2850    }
2851
2852
2853    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2854    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2855    info->static_camera_characteristics = gStaticMetadata[cameraId];
2856
2857    return rc;
2858}
2859
2860/*===========================================================================
2861 * FUNCTION   : translateMetadata
2862 *
2863 * DESCRIPTION: translate the metadata into camera_metadata_t
2864 *
2865 * PARAMETERS : type of the request
2866 *
2867 *
2868 * RETURN     : success: camera_metadata_t*
2869 *              failure: NULL
2870 *
2871 *==========================================================================*/
2872camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2873{
2874    pthread_mutex_lock(&mMutex);
2875
2876    if (mDefaultMetadata[type] != NULL) {
2877        pthread_mutex_unlock(&mMutex);
2878        return mDefaultMetadata[type];
2879    }
2880    //first time we are handling this request
2881    //fill up the metadata structure using the wrapper class
2882    CameraMetadata settings;
2883    //translate from cam_capability_t to camera_metadata_tag_t
2884    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2885    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2886
2887    /*control*/
2888
2889    uint8_t controlIntent = 0;
2890    switch (type) {
2891      case CAMERA3_TEMPLATE_PREVIEW:
2892        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2893        break;
2894      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2895        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2896        break;
2897      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2898        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2899        break;
2900      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2901        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2902        break;
2903      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2904        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2905        break;
2906      default:
2907        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2908        break;
2909    }
2910    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2911
2912    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2913            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2914
2915    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2916    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2917
2918    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2919    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2920
2921    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2922    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2923
2924    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2925    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2926
2927    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2928    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2929
2930    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2931    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2932
2933    static uint8_t focusMode;
2934    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2935        ALOGE("%s: Setting focus mode to auto", __func__);
2936        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2937    } else {
2938        ALOGE("%s: Setting focus mode to off", __func__);
2939        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2940    }
2941    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2942
2943    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2944    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2945
2946    /*flash*/
2947    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2948    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2949
2950    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2951    settings.update(ANDROID_FLASH_FIRING_POWER,
2952            &flashFiringLevel, 1);
2953
2954    /* lens */
2955    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2956    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2957
2958    if (gCamCapability[mCameraId]->filter_densities_count) {
2959        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2960        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2961                        gCamCapability[mCameraId]->filter_densities_count);
2962    }
2963
2964    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2965    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2966
2967    /* frame duration */
2968    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2969    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2970
2971    /* sensitivity */
2972    static const int32_t default_sensitivity = 100;
2973    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2974
2975    /*edge mode*/
2976    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2977    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
2978
2979    /*noise reduction mode*/
2980    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
2981    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
2982
2983    /*color correction mode*/
2984    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
2985    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
2986
2987    /*transform matrix mode*/
2988    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
2989    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
2990
2991    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
2992    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
2993
2994    mDefaultMetadata[type] = settings.release();
2995
2996    pthread_mutex_unlock(&mMutex);
2997    return mDefaultMetadata[type];
2998}
2999
3000/*===========================================================================
3001 * FUNCTION   : setFrameParameters
3002 *
3003 * DESCRIPTION: set parameters per frame as requested in the metadata from
3004 *              framework
3005 *
3006 * PARAMETERS :
3007 *   @request   : request that needs to be serviced
3008 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3009 *
3010 * RETURN     : success: NO_ERROR
3011 *              failure:
3012 *==========================================================================*/
3013int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3014                    uint32_t streamTypeMask)
3015{
3016    /*translate from camera_metadata_t type to parm_type_t*/
3017    int rc = 0;
3018    if (request->settings == NULL && mFirstRequest) {
3019        /*settings cannot be null for the first request*/
3020        return BAD_VALUE;
3021    }
3022
3023    int32_t hal_version = CAM_HAL_V3;
3024
3025    memset(mParameters, 0, sizeof(parm_buffer_t));
3026    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3027    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3028                sizeof(hal_version), &hal_version);
3029    if (rc < 0) {
3030        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3031        return BAD_VALUE;
3032    }
3033
3034    /*we need to update the frame number in the parameters*/
3035    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3036                                sizeof(request->frame_number), &(request->frame_number));
3037    if (rc < 0) {
3038        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3039        return BAD_VALUE;
3040    }
3041
3042    /* Update stream id mask where buffers are requested */
3043    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3044                                sizeof(streamTypeMask), &streamTypeMask);
3045    if (rc < 0) {
3046        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3047        return BAD_VALUE;
3048    }
3049
3050    if(request->settings != NULL){
3051        rc = translateMetadataToParameters(request);
3052    }
3053    /*set the parameters to backend*/
3054    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3055    return rc;
3056}
3057
3058/*===========================================================================
3059 * FUNCTION   : translateMetadataToParameters
3060 *
3061 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3062 *
3063 *
3064 * PARAMETERS :
3065 *   @request  : request sent from framework
3066 *
3067 *
3068 * RETURN     : success: NO_ERROR
3069 *              failure:
3070 *==========================================================================*/
3071int QCamera3HardwareInterface::translateMetadataToParameters
3072                                  (const camera3_capture_request_t *request)
3073{
3074    int rc = 0;
3075    CameraMetadata frame_settings;
3076    frame_settings = request->settings;
3077
3078    /* Do not change the order of the following list unless you know what you are
3079     * doing.
3080     * The order is laid out in such a way that parameters in the front of the table
3081     * may be used to override the parameters later in the table. Examples are:
3082     * 1. META_MODE should precede AEC/AWB/AF MODE
3083     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3084     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3085     * 4. Any mode should precede it's corresponding settings
3086     */
3087    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3088        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3089        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3090                sizeof(metaMode), &metaMode);
3091        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3092           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3093           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3094                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3095                                             fwk_sceneMode);
3096           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3097                sizeof(sceneMode), &sceneMode);
3098        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3099           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3100           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3101                sizeof(sceneMode), &sceneMode);
3102        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3103           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3104           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3105                sizeof(sceneMode), &sceneMode);
3106        }
3107    }
3108
3109    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3110        uint8_t fwk_aeMode =
3111            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3112        uint8_t aeMode;
3113        int32_t redeye;
3114
3115        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3116            aeMode = CAM_AE_MODE_OFF;
3117        } else {
3118            aeMode = CAM_AE_MODE_ON;
3119        }
3120        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3121            redeye = 1;
3122        } else {
3123            redeye = 0;
3124        }
3125
3126        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3127                                          sizeof(AE_FLASH_MODE_MAP),
3128                                          fwk_aeMode);
3129        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3130                sizeof(aeMode), &aeMode);
3131        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3132                sizeof(flashMode), &flashMode);
3133        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3134                sizeof(redeye), &redeye);
3135    }
3136
3137    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3138        uint8_t fwk_whiteLevel =
3139            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3140        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3141                sizeof(WHITE_BALANCE_MODES_MAP),
3142                fwk_whiteLevel);
3143        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3144                sizeof(whiteLevel), &whiteLevel);
3145    }
3146
3147    float focalDistance = -1.0;
3148    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3149        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3150        rc = AddSetParmEntryToBatch(mParameters,
3151                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3152                sizeof(focalDistance), &focalDistance);
3153    }
3154
3155    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3156        uint8_t fwk_focusMode =
3157            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3158        uint8_t focusMode;
3159        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3160            focusMode = CAM_FOCUS_MODE_INFINITY;
3161        } else{
3162         focusMode = lookupHalName(FOCUS_MODES_MAP,
3163                                   sizeof(FOCUS_MODES_MAP),
3164                                   fwk_focusMode);
3165        }
3166        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3167                sizeof(focusMode), &focusMode);
3168    }
3169
3170    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3171        int32_t antibandingMode =
3172            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3173        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3174                sizeof(antibandingMode), &antibandingMode);
3175    }
3176
3177    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3178        int32_t expCompensation = frame_settings.find(
3179            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3180        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3181            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3182        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3183            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3184        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3185          sizeof(expCompensation), &expCompensation);
3186    }
3187
3188    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3189        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3190        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3191                sizeof(aeLock), &aeLock);
3192    }
3193    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3194        cam_fps_range_t fps_range;
3195        fps_range.min_fps =
3196            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3197        fps_range.max_fps =
3198            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3199        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3200                sizeof(fps_range), &fps_range);
3201    }
3202
3203    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3204        uint8_t awbLock =
3205            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3206        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3207                sizeof(awbLock), &awbLock);
3208    }
3209
3210    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3211        uint8_t fwk_effectMode =
3212            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3213        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3214                sizeof(EFFECT_MODES_MAP),
3215                fwk_effectMode);
3216        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3217                sizeof(effectMode), &effectMode);
3218    }
3219
3220    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3221        uint8_t colorCorrectMode =
3222            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3223        rc =
3224            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3225                    sizeof(colorCorrectMode), &colorCorrectMode);
3226    }
3227
3228    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3229        cam_color_correct_gains_t colorCorrectGains;
3230        for (int i = 0; i < 4; i++) {
3231            colorCorrectGains.gains[i] =
3232                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3233        }
3234        rc =
3235            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3236                    sizeof(colorCorrectGains), &colorCorrectGains);
3237    }
3238
3239    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3240        cam_color_correct_matrix_t colorCorrectTransform;
3241        cam_rational_type_t transform_elem;
3242        int num = 0;
3243        for (int i = 0; i < 3; i++) {
3244           for (int j = 0; j < 3; j++) {
3245              transform_elem.numerator =
3246                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3247              transform_elem.denominator =
3248                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3249              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3250              num++;
3251           }
3252        }
3253        rc =
3254            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3255                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3256    }
3257
3258    cam_trigger_t aecTrigger;
3259    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3260    aecTrigger.trigger_id = -1;
3261    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3262        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3263        aecTrigger.trigger =
3264            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3265        aecTrigger.trigger_id =
3266            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3267    }
3268    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3269                                sizeof(aecTrigger), &aecTrigger);
3270
3271    /*af_trigger must come with a trigger id*/
3272    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3273        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3274        cam_trigger_t af_trigger;
3275        af_trigger.trigger =
3276            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3277        af_trigger.trigger_id =
3278            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3279        rc = AddSetParmEntryToBatch(mParameters,
3280                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3281    }
3282
3283    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3284        int32_t demosaic =
3285            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3286        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3287                sizeof(demosaic), &demosaic);
3288    }
3289
3290    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3291        cam_edge_application_t edge_application;
3292        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3293        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3294            edge_application.sharpness = 0;
3295        } else {
3296            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3297                int32_t edgeStrength =
3298                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3299                edge_application.sharpness = edgeStrength;
3300            } else {
3301                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3302            }
3303        }
3304        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3305                sizeof(edge_application), &edge_application);
3306    }
3307
3308    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3309        int32_t respectFlashMode = 1;
3310        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3311            uint8_t fwk_aeMode =
3312                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3313            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3314                respectFlashMode = 0;
3315                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3316                    __func__);
3317            }
3318        }
3319        if (respectFlashMode) {
3320            uint8_t flashMode =
3321                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3322            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3323                                          sizeof(FLASH_MODES_MAP),
3324                                          flashMode);
3325            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3326            // To check: CAM_INTF_META_FLASH_MODE usage
3327            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3328                          sizeof(flashMode), &flashMode);
3329        }
3330    }
3331
3332    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3333        uint8_t flashPower =
3334            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3335        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3336                sizeof(flashPower), &flashPower);
3337    }
3338
3339    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3340        int64_t flashFiringTime =
3341            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3342        rc = AddSetParmEntryToBatch(mParameters,
3343                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3344    }
3345
3346    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3347        uint8_t geometricMode =
3348            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3349        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3350                sizeof(geometricMode), &geometricMode);
3351    }
3352
3353    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3354        uint8_t geometricStrength =
3355            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3356        rc = AddSetParmEntryToBatch(mParameters,
3357                CAM_INTF_META_GEOMETRIC_STRENGTH,
3358                sizeof(geometricStrength), &geometricStrength);
3359    }
3360
3361    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3362        uint8_t hotPixelMode =
3363            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3364        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3365                sizeof(hotPixelMode), &hotPixelMode);
3366    }
3367
3368    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3369        float lensAperture =
3370            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3371        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3372                sizeof(lensAperture), &lensAperture);
3373    }
3374
3375    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3376        float filterDensity =
3377            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3378        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3379                sizeof(filterDensity), &filterDensity);
3380    }
3381
3382    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3383        float focalLength =
3384            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3385        rc = AddSetParmEntryToBatch(mParameters,
3386                CAM_INTF_META_LENS_FOCAL_LENGTH,
3387                sizeof(focalLength), &focalLength);
3388    }
3389
3390    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3391        uint8_t optStabMode =
3392            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3393        rc = AddSetParmEntryToBatch(mParameters,
3394                CAM_INTF_META_LENS_OPT_STAB_MODE,
3395                sizeof(optStabMode), &optStabMode);
3396    }
3397
3398    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3399        uint8_t noiseRedMode =
3400            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3401        rc = AddSetParmEntryToBatch(mParameters,
3402                CAM_INTF_META_NOISE_REDUCTION_MODE,
3403                sizeof(noiseRedMode), &noiseRedMode);
3404    }
3405
3406    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3407        uint8_t noiseRedStrength =
3408            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3409        rc = AddSetParmEntryToBatch(mParameters,
3410                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3411                sizeof(noiseRedStrength), &noiseRedStrength);
3412    }
3413
3414    cam_crop_region_t scalerCropRegion;
3415    bool scalerCropSet = false;
3416    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3417        scalerCropRegion.left =
3418            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3419        scalerCropRegion.top =
3420            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3421        scalerCropRegion.width =
3422            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3423        scalerCropRegion.height =
3424            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3425        rc = AddSetParmEntryToBatch(mParameters,
3426                CAM_INTF_META_SCALER_CROP_REGION,
3427                sizeof(scalerCropRegion), &scalerCropRegion);
3428        scalerCropSet = true;
3429    }
3430
3431    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3432        int64_t sensorExpTime =
3433            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3434        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3435        rc = AddSetParmEntryToBatch(mParameters,
3436                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3437                sizeof(sensorExpTime), &sensorExpTime);
3438    }
3439
3440    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3441        int64_t sensorFrameDuration =
3442            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3443        int64_t minFrameDuration = getMinFrameDuration(request);
3444        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3445        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3446            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3447        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3448        rc = AddSetParmEntryToBatch(mParameters,
3449                CAM_INTF_META_SENSOR_FRAME_DURATION,
3450                sizeof(sensorFrameDuration), &sensorFrameDuration);
3451    }
3452
3453    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3454        int32_t sensorSensitivity =
3455            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3456        if (sensorSensitivity <
3457                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3458            sensorSensitivity =
3459                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3460        if (sensorSensitivity >
3461                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3462            sensorSensitivity =
3463                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3464        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3465        rc = AddSetParmEntryToBatch(mParameters,
3466                CAM_INTF_META_SENSOR_SENSITIVITY,
3467                sizeof(sensorSensitivity), &sensorSensitivity);
3468    }
3469
3470    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3471        int32_t shadingMode =
3472            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3473        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3474                sizeof(shadingMode), &shadingMode);
3475    }
3476
3477    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3478        uint8_t shadingStrength =
3479            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3480        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3481                sizeof(shadingStrength), &shadingStrength);
3482    }
3483
3484    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3485        uint8_t fwk_facedetectMode =
3486            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3487        uint8_t facedetectMode =
3488            lookupHalName(FACEDETECT_MODES_MAP,
3489                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3490        rc = AddSetParmEntryToBatch(mParameters,
3491                CAM_INTF_META_STATS_FACEDETECT_MODE,
3492                sizeof(facedetectMode), &facedetectMode);
3493    }
3494
3495    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3496        uint8_t histogramMode =
3497            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3498        rc = AddSetParmEntryToBatch(mParameters,
3499                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3500                sizeof(histogramMode), &histogramMode);
3501    }
3502
3503    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3504        uint8_t sharpnessMapMode =
3505            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3506        rc = AddSetParmEntryToBatch(mParameters,
3507                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3508                sizeof(sharpnessMapMode), &sharpnessMapMode);
3509    }
3510
3511    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3512        uint8_t tonemapMode =
3513            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3514        rc = AddSetParmEntryToBatch(mParameters,
3515                CAM_INTF_META_TONEMAP_MODE,
3516                sizeof(tonemapMode), &tonemapMode);
3517    }
3518    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3519    /*All tonemap channels will have the same number of points*/
3520    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3521        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3522        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3523        cam_rgb_tonemap_curves tonemapCurves;
3524        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3525
3526        /* ch0 = G*/
3527        int point = 0;
3528        cam_tonemap_curve_t tonemapCurveGreen;
3529        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3530            for (int j = 0; j < 2; j++) {
3531               tonemapCurveGreen.tonemap_points[i][j] =
3532                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3533               point++;
3534            }
3535        }
3536        tonemapCurves.curves[0] = tonemapCurveGreen;
3537
3538        /* ch 1 = B */
3539        point = 0;
3540        cam_tonemap_curve_t tonemapCurveBlue;
3541        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3542            for (int j = 0; j < 2; j++) {
3543               tonemapCurveBlue.tonemap_points[i][j] =
3544                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3545               point++;
3546            }
3547        }
3548        tonemapCurves.curves[1] = tonemapCurveBlue;
3549
3550        /* ch 2 = R */
3551        point = 0;
3552        cam_tonemap_curve_t tonemapCurveRed;
3553        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3554            for (int j = 0; j < 2; j++) {
3555               tonemapCurveRed.tonemap_points[i][j] =
3556                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3557               point++;
3558            }
3559        }
3560        tonemapCurves.curves[2] = tonemapCurveRed;
3561
3562        rc = AddSetParmEntryToBatch(mParameters,
3563                CAM_INTF_META_TONEMAP_CURVES,
3564                sizeof(tonemapCurves), &tonemapCurves);
3565    }
3566
3567    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3568        uint8_t captureIntent =
3569            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3570        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3571                sizeof(captureIntent), &captureIntent);
3572    }
3573
3574    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3575        uint8_t blackLevelLock =
3576            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3577        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3578                sizeof(blackLevelLock), &blackLevelLock);
3579    }
3580
3581    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3582        uint8_t lensShadingMapMode =
3583            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3584        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3585                sizeof(lensShadingMapMode), &lensShadingMapMode);
3586    }
3587
3588    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3589        cam_area_t roi;
3590        bool reset = true;
3591        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3592        if (scalerCropSet) {
3593            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3594        }
3595        if (reset) {
3596            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3597                    sizeof(roi), &roi);
3598        }
3599    }
3600
3601    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3602        cam_area_t roi;
3603        bool reset = true;
3604        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3605        if (scalerCropSet) {
3606            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3607        }
3608        if (reset) {
3609            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3610                    sizeof(roi), &roi);
3611        }
3612    }
3613
3614    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3615        cam_area_t roi;
3616        bool reset = true;
3617        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3618        if (scalerCropSet) {
3619            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3620        }
3621        if (reset) {
3622            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3623                    sizeof(roi), &roi);
3624        }
3625    }
3626    return rc;
3627}
3628
3629/*===========================================================================
3630 * FUNCTION   : getJpegSettings
3631 *
3632 * DESCRIPTION: save the jpeg settings in the HAL
3633 *
3634 *
3635 * PARAMETERS :
3636 *   @settings  : frame settings information from framework
3637 *
3638 *
3639 * RETURN     : success: NO_ERROR
3640 *              failure:
3641 *==========================================================================*/
3642int QCamera3HardwareInterface::getJpegSettings
3643                                  (const camera_metadata_t *settings)
3644{
3645    if (mJpegSettings) {
3646        if (mJpegSettings->gps_timestamp) {
3647            free(mJpegSettings->gps_timestamp);
3648            mJpegSettings->gps_timestamp = NULL;
3649        }
3650        if (mJpegSettings->gps_coordinates) {
3651            for (int i = 0; i < 3; i++) {
3652                free(mJpegSettings->gps_coordinates[i]);
3653                mJpegSettings->gps_coordinates[i] = NULL;
3654            }
3655        }
3656        free(mJpegSettings);
3657        mJpegSettings = NULL;
3658    }
3659    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3660    CameraMetadata jpeg_settings;
3661    jpeg_settings = settings;
3662
3663    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3664        mJpegSettings->jpeg_orientation =
3665            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3666    } else {
3667        mJpegSettings->jpeg_orientation = 0;
3668    }
3669    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3670        mJpegSettings->jpeg_quality =
3671            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3672    } else {
3673        mJpegSettings->jpeg_quality = 85;
3674    }
3675    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3676        mJpegSettings->thumbnail_size.width =
3677            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3678        mJpegSettings->thumbnail_size.height =
3679            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3680    } else {
3681        mJpegSettings->thumbnail_size.width = 0;
3682        mJpegSettings->thumbnail_size.height = 0;
3683    }
3684    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3685        for (int i = 0; i < 3; i++) {
3686            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3687            *(mJpegSettings->gps_coordinates[i]) =
3688                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3689        }
3690    } else{
3691       for (int i = 0; i < 3; i++) {
3692            mJpegSettings->gps_coordinates[i] = NULL;
3693        }
3694    }
3695
3696    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3697        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3698        *(mJpegSettings->gps_timestamp) =
3699            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3700    } else {
3701        mJpegSettings->gps_timestamp = NULL;
3702    }
3703
3704    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3705        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3706        for (int i = 0; i < len; i++) {
3707            mJpegSettings->gps_processing_method[i] =
3708                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3709        }
3710        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3711            mJpegSettings->gps_processing_method[len] = '\0';
3712        }
3713    } else {
3714        mJpegSettings->gps_processing_method[0] = '\0';
3715    }
3716
3717    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3718        mJpegSettings->sensor_sensitivity =
3719            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3720    } else {
3721        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3722    }
3723
3724    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3725
3726    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3727        mJpegSettings->lens_focal_length =
3728            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3729    }
3730    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3731        mJpegSettings->exposure_compensation =
3732            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3733    }
3734    mJpegSettings->sharpness = 10; //default value
3735    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3736        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3737        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3738            mJpegSettings->sharpness = 0;
3739        }
3740    }
3741    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3742    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3743    mJpegSettings->is_jpeg_format = true;
3744    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3745    return 0;
3746}
3747
3748/*===========================================================================
3749 * FUNCTION   : captureResultCb
3750 *
3751 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3752 *
3753 * PARAMETERS :
3754 *   @frame  : frame information from mm-camera-interface
3755 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3756 *   @userdata: userdata
3757 *
3758 * RETURN     : NONE
3759 *==========================================================================*/
3760void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3761                camera3_stream_buffer_t *buffer,
3762                uint32_t frame_number, void *userdata)
3763{
3764    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3765    if (hw == NULL) {
3766        ALOGE("%s: Invalid hw %p", __func__, hw);
3767        return;
3768    }
3769
3770    hw->captureResultCb(metadata, buffer, frame_number);
3771    return;
3772}
3773
3774
3775/*===========================================================================
3776 * FUNCTION   : initialize
3777 *
3778 * DESCRIPTION: Pass framework callback pointers to HAL
3779 *
3780 * PARAMETERS :
3781 *
3782 *
3783 * RETURN     : Success : 0
3784 *              Failure: -ENODEV
3785 *==========================================================================*/
3786
3787int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3788                                  const camera3_callback_ops_t *callback_ops)
3789{
3790    ALOGV("%s: E", __func__);
3791    QCamera3HardwareInterface *hw =
3792        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3793    if (!hw) {
3794        ALOGE("%s: NULL camera device", __func__);
3795        return -ENODEV;
3796    }
3797
3798    int rc = hw->initialize(callback_ops);
3799    ALOGV("%s: X", __func__);
3800    return rc;
3801}
3802
3803/*===========================================================================
3804 * FUNCTION   : configure_streams
3805 *
3806 * DESCRIPTION:
3807 *
3808 * PARAMETERS :
3809 *
3810 *
3811 * RETURN     : Success: 0
3812 *              Failure: -EINVAL (if stream configuration is invalid)
3813 *                       -ENODEV (fatal error)
3814 *==========================================================================*/
3815
3816int QCamera3HardwareInterface::configure_streams(
3817        const struct camera3_device *device,
3818        camera3_stream_configuration_t *stream_list)
3819{
3820    ALOGV("%s: E", __func__);
3821    QCamera3HardwareInterface *hw =
3822        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3823    if (!hw) {
3824        ALOGE("%s: NULL camera device", __func__);
3825        return -ENODEV;
3826    }
3827    int rc = hw->configureStreams(stream_list);
3828    ALOGV("%s: X", __func__);
3829    return rc;
3830}
3831
3832/*===========================================================================
3833 * FUNCTION   : register_stream_buffers
3834 *
3835 * DESCRIPTION: Register stream buffers with the device
3836 *
3837 * PARAMETERS :
3838 *
3839 * RETURN     :
3840 *==========================================================================*/
3841int QCamera3HardwareInterface::register_stream_buffers(
3842        const struct camera3_device *device,
3843        const camera3_stream_buffer_set_t *buffer_set)
3844{
3845    ALOGV("%s: E", __func__);
3846    QCamera3HardwareInterface *hw =
3847        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3848    if (!hw) {
3849        ALOGE("%s: NULL camera device", __func__);
3850        return -ENODEV;
3851    }
3852    int rc = hw->registerStreamBuffers(buffer_set);
3853    ALOGV("%s: X", __func__);
3854    return rc;
3855}
3856
3857/*===========================================================================
3858 * FUNCTION   : construct_default_request_settings
3859 *
3860 * DESCRIPTION: Configure a settings buffer to meet the required use case
3861 *
3862 * PARAMETERS :
3863 *
3864 *
3865 * RETURN     : Success: Return valid metadata
3866 *              Failure: Return NULL
3867 *==========================================================================*/
3868const camera_metadata_t* QCamera3HardwareInterface::
3869    construct_default_request_settings(const struct camera3_device *device,
3870                                        int type)
3871{
3872
3873    ALOGV("%s: E", __func__);
3874    camera_metadata_t* fwk_metadata = NULL;
3875    QCamera3HardwareInterface *hw =
3876        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3877    if (!hw) {
3878        ALOGE("%s: NULL camera device", __func__);
3879        return NULL;
3880    }
3881
3882    fwk_metadata = hw->translateCapabilityToMetadata(type);
3883
3884    ALOGV("%s: X", __func__);
3885    return fwk_metadata;
3886}
3887
3888/*===========================================================================
3889 * FUNCTION   : process_capture_request
3890 *
3891 * DESCRIPTION:
3892 *
3893 * PARAMETERS :
3894 *
3895 *
3896 * RETURN     :
3897 *==========================================================================*/
3898int QCamera3HardwareInterface::process_capture_request(
3899                    const struct camera3_device *device,
3900                    camera3_capture_request_t *request)
3901{
3902    ALOGV("%s: E", __func__);
3903    QCamera3HardwareInterface *hw =
3904        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3905    if (!hw) {
3906        ALOGE("%s: NULL camera device", __func__);
3907        return -EINVAL;
3908    }
3909
3910    int rc = hw->processCaptureRequest(request);
3911    ALOGV("%s: X", __func__);
3912    return rc;
3913}
3914
3915/*===========================================================================
3916 * FUNCTION   : get_metadata_vendor_tag_ops
3917 *
3918 * DESCRIPTION:
3919 *
3920 * PARAMETERS :
3921 *
3922 *
3923 * RETURN     :
3924 *==========================================================================*/
3925
3926void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3927                const struct camera3_device *device,
3928                vendor_tag_query_ops_t* ops)
3929{
3930    ALOGV("%s: E", __func__);
3931    QCamera3HardwareInterface *hw =
3932        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3933    if (!hw) {
3934        ALOGE("%s: NULL camera device", __func__);
3935        return;
3936    }
3937
3938    hw->getMetadataVendorTagOps(ops);
3939    ALOGV("%s: X", __func__);
3940    return;
3941}
3942
3943/*===========================================================================
3944 * FUNCTION   : dump
3945 *
3946 * DESCRIPTION:
3947 *
3948 * PARAMETERS :
3949 *
3950 *
3951 * RETURN     :
3952 *==========================================================================*/
3953
3954void QCamera3HardwareInterface::dump(
3955                const struct camera3_device *device, int fd)
3956{
3957    ALOGV("%s: E", __func__);
3958    QCamera3HardwareInterface *hw =
3959        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3960    if (!hw) {
3961        ALOGE("%s: NULL camera device", __func__);
3962        return;
3963    }
3964
3965    hw->dump(fd);
3966    ALOGV("%s: X", __func__);
3967    return;
3968}
3969
3970/*===========================================================================
3971 * FUNCTION   : flush
3972 *
3973 * DESCRIPTION:
3974 *
3975 * PARAMETERS :
3976 *
3977 *
3978 * RETURN     :
3979 *==========================================================================*/
3980
3981int QCamera3HardwareInterface::flush(
3982                const struct camera3_device *device)
3983{
3984    int rc;
3985    ALOGV("%s: E", __func__);
3986    QCamera3HardwareInterface *hw =
3987        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3988    if (!hw) {
3989        ALOGE("%s: NULL camera device", __func__);
3990        return -EINVAL;
3991    }
3992
3993    rc = hw->flush();
3994    ALOGV("%s: X", __func__);
3995    return rc;
3996}
3997
3998/*===========================================================================
3999 * FUNCTION   : close_camera_device
4000 *
4001 * DESCRIPTION:
4002 *
4003 * PARAMETERS :
4004 *
4005 *
4006 * RETURN     :
4007 *==========================================================================*/
4008int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4009{
4010    ALOGV("%s: E", __func__);
4011    int ret = NO_ERROR;
4012    QCamera3HardwareInterface *hw =
4013        reinterpret_cast<QCamera3HardwareInterface *>(
4014            reinterpret_cast<camera3_device_t *>(device)->priv);
4015    if (!hw) {
4016        ALOGE("NULL camera device");
4017        return BAD_VALUE;
4018    }
4019    delete hw;
4020
4021    pthread_mutex_lock(&mCameraSessionLock);
4022    mCameraSessionActive = 0;
4023    pthread_mutex_unlock(&mCameraSessionLock);
4024    ALOGV("%s: X", __func__);
4025    return ret;
4026}
4027
4028/*===========================================================================
4029 * FUNCTION   : getWaveletDenoiseProcessPlate
4030 *
4031 * DESCRIPTION: query wavelet denoise process plate
4032 *
4033 * PARAMETERS : None
4034 *
4035 * RETURN     : WNR prcocess plate vlaue
4036 *==========================================================================*/
4037cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4038{
4039    char prop[PROPERTY_VALUE_MAX];
4040    memset(prop, 0, sizeof(prop));
4041    property_get("persist.denoise.process.plates", prop, "0");
4042    int processPlate = atoi(prop);
4043    switch(processPlate) {
4044    case 0:
4045        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4046    case 1:
4047        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4048    case 2:
4049        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4050    case 3:
4051        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4052    default:
4053        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4054    }
4055}
4056
4057/*===========================================================================
4058 * FUNCTION   : needRotationReprocess
4059 *
4060 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4061 *
4062 * PARAMETERS : none
4063 *
4064 * RETURN     : true: needed
4065 *              false: no need
4066 *==========================================================================*/
4067bool QCamera3HardwareInterface::needRotationReprocess()
4068{
4069
4070    if (!mJpegSettings->is_jpeg_format) {
4071        // RAW image, no need to reprocess
4072        return false;
4073    }
4074
4075    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4076        mJpegSettings->jpeg_orientation > 0) {
4077        // current rotation is not zero, and pp has the capability to process rotation
4078        ALOGD("%s: need do reprocess for rotation", __func__);
4079        return true;
4080    }
4081
4082    return false;
4083}
4084
4085/*===========================================================================
4086 * FUNCTION   : needReprocess
4087 *
4088 * DESCRIPTION: if reprocess in needed
4089 *
4090 * PARAMETERS : none
4091 *
4092 * RETURN     : true: needed
4093 *              false: no need
4094 *==========================================================================*/
4095bool QCamera3HardwareInterface::needReprocess()
4096{
4097    if (!mJpegSettings->is_jpeg_format) {
4098        // RAW image, no need to reprocess
4099        return false;
4100    }
4101
4102    if ((mJpegSettings->min_required_pp_mask > 0) ||
4103         isWNREnabled()) {
4104        // TODO: add for ZSL HDR later
4105        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4106        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4107        return true;
4108    }
4109    return needRotationReprocess();
4110}
4111
4112/*===========================================================================
4113 * FUNCTION   : addOnlineReprocChannel
4114 *
4115 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4116 *              coming from input channel
4117 *
4118 * PARAMETERS :
4119 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4120 *
4121 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4122 *==========================================================================*/
4123QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4124              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4125{
4126    int32_t rc = NO_ERROR;
4127    QCamera3ReprocessChannel *pChannel = NULL;
4128    if (pInputChannel == NULL) {
4129        ALOGE("%s: input channel obj is NULL", __func__);
4130        return NULL;
4131    }
4132
4133    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4134            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4135    if (NULL == pChannel) {
4136        ALOGE("%s: no mem for reprocess channel", __func__);
4137        return NULL;
4138    }
4139
4140    // Capture channel, only need snapshot and postview streams start together
4141    mm_camera_channel_attr_t attr;
4142    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4143    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4144    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4145    rc = pChannel->initialize();
4146    if (rc != NO_ERROR) {
4147        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4148        delete pChannel;
4149        return NULL;
4150    }
4151
4152    // pp feature config
4153    cam_pp_feature_config_t pp_config;
4154    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4155    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4156        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4157        pp_config.sharpness = mJpegSettings->sharpness;
4158    }
4159
4160    if (isWNREnabled()) {
4161        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4162        pp_config.denoise2d.denoise_enable = 1;
4163        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4164    }
4165    if (needRotationReprocess()) {
4166        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4167        int rotation = mJpegSettings->jpeg_orientation;
4168        if (rotation == 0) {
4169            pp_config.rotation = ROTATE_0;
4170        } else if (rotation == 90) {
4171            pp_config.rotation = ROTATE_90;
4172        } else if (rotation == 180) {
4173            pp_config.rotation = ROTATE_180;
4174        } else if (rotation == 270) {
4175            pp_config.rotation = ROTATE_270;
4176        }
4177    }
4178
4179   rc = pChannel->addReprocStreamsFromSource(pp_config,
4180                                             pInputChannel,
4181                                             mMetadataChannel);
4182
4183    if (rc != NO_ERROR) {
4184        delete pChannel;
4185        return NULL;
4186    }
4187    return pChannel;
4188}
4189
/*===========================================================================
 * FUNCTION   : getMaxUnmatchedFramesInQueue
 *
 * DESCRIPTION: max number of unmatched frames a channel queue may hold,
 *              taken from the capability table's min pp buffer count
 *
 * PARAMETERS : None
 *
 * RETURN     : max unmatched frame count for this camera
 *==========================================================================*/
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4194
/*===========================================================================
 * FUNCTION   : isWNREnabled
 *
 * DESCRIPTION: whether wavelet noise reduction is supported, per the
 *              capability table for this camera
 *
 * PARAMETERS : None
 *
 * RETURN     : true if WNR is supported, false otherwise
 *==========================================================================*/
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4198
4199}; //end namespace qcamera
4200