QCamera3HWI.cpp revision 62595db7d19b3c5997145f35f1d26b87102aa984
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Convenience accessor for a buffer pointer held inside a QCamera3Memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables and cached static metadata, indexed by camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive; only one camera session may be open at a time
// (enforced in openCamera()).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Maps framework ANDROID_CONTROL_EFFECT_MODE_* values to backend CAM_EFFECT_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
72
// Maps framework ANDROID_CONTROL_AWB_MODE_* values to backend CAM_WB_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
84
// Maps framework ANDROID_CONTROL_SCENE_MODE_* values to backend CAM_SCENE_MODE_*.
// Note: STEADYPHOTO intentionally maps to the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
102
// Maps framework ANDROID_CONTROL_AF_MODE_* values to backend CAM_FOCUS_MODE_*.
// Note: AF_MODE_OFF maps to FIXED focus (the backend has no explicit "off").
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
111
// Maps framework AE antibanding modes to backend CAM_ANTIBANDING_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
118
// Maps framework AE modes to the flash behavior the backend should use.
// Both OFF and ON (no-flash) AE modes disable the flash; REDEYE falls back to AUTO.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
126
// Maps framework ANDROID_FLASH_MODE_* values to backend CAM_FLASH_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
132
// Maps framework face-detect modes to backend modes (SIMPLE is not listed here).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
137
// Supported JPEG thumbnail sizes as (width, height) pairs; the trailing
// (0, 0) entry means "no thumbnail", as required by the HAL3 metadata spec.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
// camera3_device_ops_t vtable handed to the framework; each entry forwards to
// the corresponding static trampoline, which recovers `this` from device->priv.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the camera3_device_t the framework uses to reach this instance;
    // priv carries `this` so the static ops trampolines can recover the object.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is assumed non-NULL here; the
    // capability table is expected to be populated before construction.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if (channel)
240            delete channel;
241        free (*it);
242    }
243
244    mPictureChannel = NULL;
245
246    if (mJpegSettings != NULL) {
247        free(mJpegSettings);
248        mJpegSettings = NULL;
249    }
250
251    /* Clean up all channels */
252    if (mCameraInitialized) {
253        mMetadataChannel->stop();
254        delete mMetadataChannel;
255        mMetadataChannel = NULL;
256        deinitParameters();
257    }
258
259    if (mCameraOpened)
260        closeCamera();
261
262    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
263        if (mDefaultMetadata[i])
264            free_camera_metadata(mDefaultMetadata[i]);
265
266    pthread_cond_destroy(&mRequestCond);
267
268    pthread_mutex_destroy(&mMutex);
269    ALOGV("%s: X", __func__);
270}
271
272/*===========================================================================
273 * FUNCTION   : openCamera
274 *
275 * DESCRIPTION: open camera
276 *
277 * PARAMETERS :
278 *   @hw_device  : double ptr for camera device struct
279 *
280 * RETURN     : int32_t type of status
281 *              NO_ERROR  -- success
282 *              none-zero failure code
283 *==========================================================================*/
284int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
285{
286    int rc = 0;
287    pthread_mutex_lock(&mCameraSessionLock);
288    if (mCameraSessionActive) {
289        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
290        pthread_mutex_unlock(&mCameraSessionLock);
291        return INVALID_OPERATION;
292    }
293
294    if (mCameraOpened) {
295        *hw_device = NULL;
296        return PERMISSION_DENIED;
297    }
298
299    rc = openCamera();
300    if (rc == 0) {
301        *hw_device = &mCameraDevice.common;
302        mCameraSessionActive = 1;
303    } else
304        *hw_device = NULL;
305
306#ifdef HAS_MULTIMEDIA_HINTS
307    if (rc == 0) {
308        if (m_pPowerModule) {
309            if (m_pPowerModule->powerHint) {
310                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
311                        (void *)"state=1");
312            }
313        }
314    }
315#endif
316    pthread_mutex_unlock(&mCameraSessionLock);
317    return rc;
318}
319
320/*===========================================================================
321 * FUNCTION   : openCamera
322 *
323 * DESCRIPTION: open camera
324 *
325 * PARAMETERS : none
326 *
327 * RETURN     : int32_t type of status
328 *              NO_ERROR  -- success
329 *              none-zero failure code
330 *==========================================================================*/
331int QCamera3HardwareInterface::openCamera()
332{
333    if (mCameraHandle) {
334        ALOGE("Failure: Camera already opened");
335        return ALREADY_EXISTS;
336    }
337    mCameraHandle = camera_open(mCameraId);
338    if (!mCameraHandle) {
339        ALOGE("camera_open failed.");
340        return UNKNOWN_ERROR;
341    }
342
343    mCameraOpened = true;
344
345    return NO_ERROR;
346}
347
348/*===========================================================================
349 * FUNCTION   : closeCamera
350 *
351 * DESCRIPTION: close camera
352 *
353 * PARAMETERS : none
354 *
355 * RETURN     : int32_t type of status
356 *              NO_ERROR  -- success
357 *              none-zero failure code
358 *==========================================================================*/
359int QCamera3HardwareInterface::closeCamera()
360{
361    int rc = NO_ERROR;
362
363    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
364    mCameraHandle = NULL;
365    mCameraOpened = false;
366
367#ifdef HAS_MULTIMEDIA_HINTS
368    if (rc == NO_ERROR) {
369        if (m_pPowerModule) {
370            if (m_pPowerModule->powerHint) {
371                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
372                        (void *)"state=0");
373            }
374        }
375    }
376#endif
377
378    return rc;
379}
380
381/*===========================================================================
382 * FUNCTION   : initialize
383 *
384 * DESCRIPTION: Initialize frameworks callback functions
385 *
386 * PARAMETERS :
387 *   @callback_ops : callback function to frameworks
388 *
389 * RETURN     :
390 *
391 *==========================================================================*/
392int QCamera3HardwareInterface::initialize(
393        const struct camera3_callback_ops *callback_ops)
394{
395    int rc;
396
397    pthread_mutex_lock(&mMutex);
398
399    rc = initParameters();
400    if (rc < 0) {
401        ALOGE("%s: initParamters failed %d", __func__, rc);
402       goto err1;
403    }
404    mCallbackOps = callback_ops;
405
406    pthread_mutex_unlock(&mMutex);
407    mCameraInitialized = true;
408    return 0;
409
410err1:
411    pthread_mutex_unlock(&mMutex);
412    return rc;
413}
414
415/*===========================================================================
416 * FUNCTION   : configureStreams
417 *
418 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
419 *              and output streams.
420 *
421 * PARAMETERS :
422 *   @stream_list : streams to be configured
423 *
424 * RETURN     :
425 *
426 *==========================================================================*/
427int QCamera3HardwareInterface::configureStreams(
428        camera3_stream_configuration_t *streamList)
429{
430    int rc = 0;
431    mIsZslMode = false;
432
433    // Sanity check stream_list
434    if (streamList == NULL) {
435        ALOGE("%s: NULL stream configuration", __func__);
436        return BAD_VALUE;
437    }
438    if (streamList->streams == NULL) {
439        ALOGE("%s: NULL stream list", __func__);
440        return BAD_VALUE;
441    }
442
443    if (streamList->num_streams < 1) {
444        ALOGE("%s: Bad number of streams requested: %d", __func__,
445                streamList->num_streams);
446        return BAD_VALUE;
447    }
448
449    /* first invalidate all the steams in the mStreamList
450     * if they appear again, they will be validated */
451    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
452            it != mStreamInfo.end(); it++) {
453        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
454        channel->stop();
455        (*it)->status = INVALID;
456    }
457    if (mMetadataChannel) {
458        /* If content of mStreamInfo is not 0, there is metadata stream */
459        mMetadataChannel->stop();
460    }
461
462    pthread_mutex_lock(&mMutex);
463
464    camera3_stream_t *inputStream = NULL;
465    camera3_stream_t *jpegStream = NULL;
466    cam_stream_size_info_t stream_config_info;
467
468    for (size_t i = 0; i < streamList->num_streams; i++) {
469        camera3_stream_t *newStream = streamList->streams[i];
470        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
471                __func__, newStream->stream_type, newStream->format,
472                 newStream->width, newStream->height);
473        //if the stream is in the mStreamList validate it
474        bool stream_exists = false;
475        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
476                it != mStreamInfo.end(); it++) {
477            if ((*it)->stream == newStream) {
478                QCamera3Channel *channel =
479                    (QCamera3Channel*)(*it)->stream->priv;
480                stream_exists = true;
481                (*it)->status = RECONFIGURE;
482                /*delete the channel object associated with the stream because
483                  we need to reconfigure*/
484                delete channel;
485                (*it)->stream->priv = NULL;
486            }
487        }
488        if (!stream_exists) {
489            //new stream
490            stream_info_t* stream_info;
491            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
492            stream_info->stream = newStream;
493            stream_info->status = VALID;
494            stream_info->registered = 0;
495            mStreamInfo.push_back(stream_info);
496        }
497        if (newStream->stream_type == CAMERA3_STREAM_INPUT
498                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
499            if (inputStream != NULL) {
500                ALOGE("%s: Multiple input streams requested!", __func__);
501                pthread_mutex_unlock(&mMutex);
502                return BAD_VALUE;
503            }
504            inputStream = newStream;
505        }
506        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
507            jpegStream = newStream;
508        }
509    }
510    mInputStream = inputStream;
511
512    /*clean up invalid streams*/
513    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
514            it != mStreamInfo.end();) {
515        if(((*it)->status) == INVALID){
516            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
517            delete channel;
518            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
519            free(*it);
520            it = mStreamInfo.erase(it);
521        } else {
522            it++;
523        }
524    }
525    if (mMetadataChannel) {
526        delete mMetadataChannel;
527        mMetadataChannel = NULL;
528    }
529
530    //Create metadata channel and initialize it
531    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
532                    mCameraHandle->ops, captureResultCb,
533                    &gCamCapability[mCameraId]->padding_info, this);
534    if (mMetadataChannel == NULL) {
535        ALOGE("%s: failed to allocate metadata channel", __func__);
536        rc = -ENOMEM;
537        pthread_mutex_unlock(&mMutex);
538        return rc;
539    }
540    rc = mMetadataChannel->initialize();
541    if (rc < 0) {
542        ALOGE("%s: metadata channel initialization failed", __func__);
543        delete mMetadataChannel;
544        pthread_mutex_unlock(&mMutex);
545        return rc;
546    }
547
548    /* Allocate channel objects for the requested streams */
549    for (size_t i = 0; i < streamList->num_streams; i++) {
550        camera3_stream_t *newStream = streamList->streams[i];
551        uint32_t stream_usage = newStream->usage;
552        stream_config_info.stream_sizes[i].width = newStream->width;
553        stream_config_info.stream_sizes[i].height = newStream->height;
554        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
555            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
556            //for zsl stream the size is jpeg size
557            stream_config_info.stream_sizes[i].width = jpegStream->width;
558            stream_config_info.stream_sizes[i].height = jpegStream->height;
559            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
560        } else {
561           //for non zsl streams find out the format
562           switch (newStream->format) {
563           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
564              {
565                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
566                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
567                 } else {
568                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
569                 }
570              }
571              break;
572           case HAL_PIXEL_FORMAT_YCbCr_420_888:
573              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
574              break;
575           case HAL_PIXEL_FORMAT_BLOB:
576              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
577              break;
578           default:
579              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
580              break;
581           }
582        }
583        if (newStream->priv == NULL) {
584            //New stream, construct channel
585            switch (newStream->stream_type) {
586            case CAMERA3_STREAM_INPUT:
587                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
588                break;
589            case CAMERA3_STREAM_BIDIRECTIONAL:
590                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
591                    GRALLOC_USAGE_HW_CAMERA_WRITE;
592                break;
593            case CAMERA3_STREAM_OUTPUT:
594                /* For video encoding stream, set read/write rarely
595                 * flag so that they may be set to un-cached */
596                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
597                    newStream->usage =
598                         (GRALLOC_USAGE_SW_READ_RARELY |
599                         GRALLOC_USAGE_SW_WRITE_RARELY |
600                         GRALLOC_USAGE_HW_CAMERA_WRITE);
601                else
602                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
603                break;
604            default:
605                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
606                break;
607            }
608
609            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
610                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
611                QCamera3Channel *channel;
612                switch (newStream->format) {
613                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
614                case HAL_PIXEL_FORMAT_YCbCr_420_888:
615                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
616                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
617                        jpegStream) {
618                        uint32_t width = jpegStream->width;
619                        uint32_t height = jpegStream->height;
620                        mIsZslMode = true;
621                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
622                            mCameraHandle->ops, captureResultCb,
623                            &gCamCapability[mCameraId]->padding_info, this, newStream,
624                            width, height);
625                    } else
626                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
627                            mCameraHandle->ops, captureResultCb,
628                            &gCamCapability[mCameraId]->padding_info, this, newStream);
629                    if (channel == NULL) {
630                        ALOGE("%s: allocation of channel failed", __func__);
631                        pthread_mutex_unlock(&mMutex);
632                        return -ENOMEM;
633                    }
634
635                    newStream->priv = channel;
636                    break;
637                case HAL_PIXEL_FORMAT_BLOB:
638                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
639                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
640                            mCameraHandle->ops, captureResultCb,
641                            &gCamCapability[mCameraId]->padding_info, this, newStream);
642                    if (mPictureChannel == NULL) {
643                        ALOGE("%s: allocation of channel failed", __func__);
644                        pthread_mutex_unlock(&mMutex);
645                        return -ENOMEM;
646                    }
647                    newStream->priv = (QCamera3Channel*)mPictureChannel;
648                    break;
649
650                //TODO: Add support for app consumed format?
651                default:
652                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
653                    break;
654                }
655            }
656        } else {
657            // Channel already exists for this stream
658            // Do nothing for now
659        }
660    }
661    /*For the streams to be reconfigured we need to register the buffers
662      since the framework wont*/
663    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
664            it != mStreamInfo.end(); it++) {
665        if ((*it)->status == RECONFIGURE) {
666            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
667            /*only register buffers for streams that have already been
668              registered*/
669            if ((*it)->registered) {
670                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
671                        (*it)->buffer_set.buffers);
672                if (rc != NO_ERROR) {
673                    ALOGE("%s: Failed to register the buffers of old stream,\
674                            rc = %d", __func__, rc);
675                }
676                ALOGV("%s: channel %p has %d buffers",
677                        __func__, channel, (*it)->buffer_set.num_buffers);
678            }
679        }
680
681        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
682        if (index == NAME_NOT_FOUND) {
683            mPendingBuffersMap.add((*it)->stream, 0);
684        } else {
685            mPendingBuffersMap.editValueAt(index) = 0;
686        }
687    }
688
689    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
690    mPendingRequestsList.clear();
691
692    /*flush the metadata list*/
693    if (!mStoredMetadataList.empty()) {
694        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
695              m != mStoredMetadataList.end(); m++) {
696            mMetadataChannel->bufDone(m->meta_buf);
697            free(m->meta_buf);
698            m = mStoredMetadataList.erase(m);
699        }
700    }
701    int32_t hal_version = CAM_HAL_V3;
702    stream_config_info.num_streams = streamList->num_streams;
703
704    //settings/parameters don't carry over for new configureStreams
705    memset(mParameters, 0, sizeof(parm_buffer_t));
706
707    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
708    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
709                sizeof(hal_version), &hal_version);
710
711    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
712                sizeof(stream_config_info), &stream_config_info);
713
714    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
715
716    mFirstRequest = true;
717
718    //Get min frame duration for this streams configuration
719    deriveMinFrameDuration();
720
721    pthread_mutex_unlock(&mMutex);
722    return rc;
723}
724
725/*===========================================================================
726 * FUNCTION   : validateCaptureRequest
727 *
728 * DESCRIPTION: validate a capture request from camera service
729 *
730 * PARAMETERS :
731 *   @request : request from framework to process
732 *
733 * RETURN     :
734 *
735 *==========================================================================*/
736int QCamera3HardwareInterface::validateCaptureRequest(
737                    camera3_capture_request_t *request)
738{
739    ssize_t idx = 0;
740    const camera3_stream_buffer_t *b;
741    CameraMetadata meta;
742
743    /* Sanity check the request */
744    if (request == NULL) {
745        ALOGE("%s: NULL capture request", __func__);
746        return BAD_VALUE;
747    }
748
749    uint32_t frameNumber = request->frame_number;
750    if (request->input_buffer != NULL &&
751            request->input_buffer->stream != mInputStream) {
752        ALOGE("%s: Request %d: Input buffer not from input stream!",
753                __FUNCTION__, frameNumber);
754        return BAD_VALUE;
755    }
756    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
757        ALOGE("%s: Request %d: No output buffers provided!",
758                __FUNCTION__, frameNumber);
759        return BAD_VALUE;
760    }
761    if (request->input_buffer != NULL) {
762        b = request->input_buffer;
763        QCamera3Channel *channel =
764            static_cast<QCamera3Channel*>(b->stream->priv);
765        if (channel == NULL) {
766            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
767                    __func__, frameNumber, idx);
768            return BAD_VALUE;
769        }
770        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
771            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
772                    __func__, frameNumber, idx);
773            return BAD_VALUE;
774        }
775        if (b->release_fence != -1) {
776            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
777                    __func__, frameNumber, idx);
778            return BAD_VALUE;
779        }
780        if (b->buffer == NULL) {
781            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
782                    __func__, frameNumber, idx);
783            return BAD_VALUE;
784        }
785    }
786
787    // Validate all buffers
788    b = request->output_buffers;
789    do {
790        QCamera3Channel *channel =
791                static_cast<QCamera3Channel*>(b->stream->priv);
792        if (channel == NULL) {
793            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
794                    __func__, frameNumber, idx);
795            return BAD_VALUE;
796        }
797        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
798            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
799                    __func__, frameNumber, idx);
800            return BAD_VALUE;
801        }
802        if (b->release_fence != -1) {
803            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
804                    __func__, frameNumber, idx);
805            return BAD_VALUE;
806        }
807        if (b->buffer == NULL) {
808            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
809                    __func__, frameNumber, idx);
810            return BAD_VALUE;
811        }
812        idx++;
813        b = request->output_buffers + idx;
814    } while (idx < (ssize_t)request->num_output_buffers);
815
816    return NO_ERROR;
817}
818
819/*===========================================================================
820 * FUNCTION   : deriveMinFrameDuration
821 *
822 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
823 *              on currently configured streams.
824 *
825 * PARAMETERS : NONE
826 *
827 * RETURN     : NONE
828 *
829 *==========================================================================*/
830void QCamera3HardwareInterface::deriveMinFrameDuration()
831{
832    int32_t maxJpegDimension, maxProcessedDimension;
833
834    maxJpegDimension = 0;
835    maxProcessedDimension = 0;
836
837    // Figure out maximum jpeg, processed, and raw dimensions
838    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
839        it != mStreamInfo.end(); it++) {
840
841        // Input stream doesn't have valid stream_type
842        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
843            continue;
844
845        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
846        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
847            if (dimension > maxJpegDimension)
848                maxJpegDimension = dimension;
849        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
850            if (dimension > maxProcessedDimension)
851                maxProcessedDimension = dimension;
852        }
853    }
854
855    //Assume all jpeg dimensions are in processed dimensions.
856    if (maxJpegDimension > maxProcessedDimension)
857        maxProcessedDimension = maxJpegDimension;
858
859    //Find minimum durations for processed, jpeg, and raw
860    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
861    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
862        if (maxProcessedDimension ==
863            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
864            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
865            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
866            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
867            break;
868        }
869    }
870}
871
872/*===========================================================================
873 * FUNCTION   : getMinFrameDuration
874 *
875 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
876 *              and current request configuration.
877 *
878 * PARAMETERS : @request: requset sent by the frameworks
879 *
880 * RETURN     : min farme duration for a particular request
881 *
882 *==========================================================================*/
883int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
884{
885    bool hasJpegStream = false;
886    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
887        const camera3_stream_t *stream = request->output_buffers[i].stream;
888        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
889            hasJpegStream = true;
890    }
891
892    if (!hasJpegStream)
893        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
894    else
895        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
896}
897
898/*===========================================================================
899 * FUNCTION   : registerStreamBuffers
900 *
901 * DESCRIPTION: Register buffers for a given stream with the HAL device.
902 *
903 * PARAMETERS :
904 *   @stream_list : streams to be configured
905 *
906 * RETURN     :
907 *
908 *==========================================================================*/
909int QCamera3HardwareInterface::registerStreamBuffers(
910        const camera3_stream_buffer_set_t *buffer_set)
911{
912    int rc = 0;
913
914    pthread_mutex_lock(&mMutex);
915
916    if (buffer_set == NULL) {
917        ALOGE("%s: Invalid buffer_set parameter.", __func__);
918        pthread_mutex_unlock(&mMutex);
919        return -EINVAL;
920    }
921    if (buffer_set->stream == NULL) {
922        ALOGE("%s: Invalid stream parameter.", __func__);
923        pthread_mutex_unlock(&mMutex);
924        return -EINVAL;
925    }
926    if (buffer_set->num_buffers < 1) {
927        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
928        pthread_mutex_unlock(&mMutex);
929        return -EINVAL;
930    }
931    if (buffer_set->buffers == NULL) {
932        ALOGE("%s: Invalid buffers parameter.", __func__);
933        pthread_mutex_unlock(&mMutex);
934        return -EINVAL;
935    }
936
937    camera3_stream_t *stream = buffer_set->stream;
938    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
939
940    //set the buffer_set in the mStreamInfo array
941    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
942            it != mStreamInfo.end(); it++) {
943        if ((*it)->stream == stream) {
944            uint32_t numBuffers = buffer_set->num_buffers;
945            (*it)->buffer_set.stream = buffer_set->stream;
946            (*it)->buffer_set.num_buffers = numBuffers;
947            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
948            if ((*it)->buffer_set.buffers == NULL) {
949                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
950                pthread_mutex_unlock(&mMutex);
951                return -ENOMEM;
952            }
953            for (size_t j = 0; j < numBuffers; j++){
954                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
955            }
956            (*it)->registered = 1;
957        }
958    }
959    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
960    if (rc < 0) {
961        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
962        pthread_mutex_unlock(&mMutex);
963        return -ENODEV;
964    }
965
966    pthread_mutex_unlock(&mMutex);
967    return NO_ERROR;
968}
969
970/*===========================================================================
971 * FUNCTION   : processCaptureRequest
972 *
973 * DESCRIPTION: process a capture request from camera service
974 *
975 * PARAMETERS :
976 *   @request : request from framework to process
977 *
978 * RETURN     :
979 *
980 *==========================================================================*/
981int QCamera3HardwareInterface::processCaptureRequest(
982                    camera3_capture_request_t *request)
983{
984    int rc = NO_ERROR;
985    int32_t request_id;
986    CameraMetadata meta;
987    MetadataBufferInfo reproc_meta;
988    int queueMetadata = 0;
989
990    pthread_mutex_lock(&mMutex);
991
992    rc = validateCaptureRequest(request);
993    if (rc != NO_ERROR) {
994        ALOGE("%s: incoming request is not valid", __func__);
995        pthread_mutex_unlock(&mMutex);
996        return rc;
997    }
998
999    meta = request->settings;
1000
1001    // For first capture request, send capture intent, and
1002    // stream on all streams
1003    if (mFirstRequest) {
1004
1005        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1006            int32_t hal_version = CAM_HAL_V3;
1007            uint8_t captureIntent =
1008                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1009
1010            memset(mParameters, 0, sizeof(parm_buffer_t));
1011            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1012            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1013                sizeof(hal_version), &hal_version);
1014            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1015                sizeof(captureIntent), &captureIntent);
1016            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1017                mParameters);
1018        }
1019
1020        mMetadataChannel->start();
1021        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1022            it != mStreamInfo.end(); it++) {
1023            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1024            channel->start();
1025        }
1026    }
1027
1028    uint32_t frameNumber = request->frame_number;
1029    uint32_t streamTypeMask = 0;
1030
1031    if (meta.exists(ANDROID_REQUEST_ID)) {
1032        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1033        mCurrentRequestId = request_id;
1034        ALOGV("%s: Received request with id: %d",__func__, request_id);
1035    } else if (mFirstRequest || mCurrentRequestId == -1){
1036        ALOGE("%s: Unable to find request id field, \
1037                & no previous id available", __func__);
1038        return NAME_NOT_FOUND;
1039    } else {
1040        ALOGV("%s: Re-using old request id", __func__);
1041        request_id = mCurrentRequestId;
1042    }
1043
1044    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1045                                    __func__, __LINE__,
1046                                    request->num_output_buffers,
1047                                    request->input_buffer,
1048                                    frameNumber);
1049    // Acquire all request buffers first
1050    int blob_request = 0;
1051    for (size_t i = 0; i < request->num_output_buffers; i++) {
1052        const camera3_stream_buffer_t& output = request->output_buffers[i];
1053        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1054        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1055
1056        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1057        //Call function to store local copy of jpeg data for encode params.
1058            blob_request = 1;
1059            rc = getJpegSettings(request->settings);
1060            if (rc < 0) {
1061                ALOGE("%s: failed to get jpeg parameters", __func__);
1062                pthread_mutex_unlock(&mMutex);
1063                return rc;
1064            }
1065        }
1066
1067        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1068        if (rc != OK) {
1069            ALOGE("%s: fence wait failed %d", __func__, rc);
1070            pthread_mutex_unlock(&mMutex);
1071            return rc;
1072        }
1073        streamTypeMask |= channel->getStreamTypeMask();
1074    }
1075
1076    rc = setFrameParameters(request, streamTypeMask);
1077    if (rc < 0) {
1078        ALOGE("%s: fail to set frame parameters", __func__);
1079        pthread_mutex_unlock(&mMutex);
1080        return rc;
1081    }
1082
1083    /* Update pending request list and pending buffers map */
1084    PendingRequestInfo pendingRequest;
1085    pendingRequest.frame_number = frameNumber;
1086    pendingRequest.num_buffers = request->num_output_buffers;
1087    pendingRequest.request_id = request_id;
1088    pendingRequest.blob_request = blob_request;
1089    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1090
1091    for (size_t i = 0; i < request->num_output_buffers; i++) {
1092        RequestedBufferInfo requestedBuf;
1093        requestedBuf.stream = request->output_buffers[i].stream;
1094        requestedBuf.buffer = NULL;
1095        pendingRequest.buffers.push_back(requestedBuf);
1096
1097        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1098    }
1099    mPendingRequestsList.push_back(pendingRequest);
1100
1101    // Notify metadata channel we receive a request
1102    mMetadataChannel->request(NULL, frameNumber);
1103
1104    // Call request on other streams
1105    for (size_t i = 0; i < request->num_output_buffers; i++) {
1106        const camera3_stream_buffer_t& output = request->output_buffers[i];
1107        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1108        mm_camera_buf_def_t *pInputBuffer = NULL;
1109
1110        if (channel == NULL) {
1111            ALOGE("%s: invalid channel pointer for stream", __func__);
1112            continue;
1113        }
1114
1115        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1116            QCamera3RegularChannel* inputChannel = NULL;
1117            if(request->input_buffer != NULL){
1118                //Try to get the internal format
1119                inputChannel = (QCamera3RegularChannel*)
1120                    request->input_buffer->stream->priv;
1121                if(inputChannel == NULL ){
1122                    ALOGE("%s: failed to get input channel handle", __func__);
1123                } else {
1124                    pInputBuffer =
1125                        inputChannel->getInternalFormatBuffer(
1126                                request->input_buffer->buffer);
1127                    ALOGD("%s: Input buffer dump",__func__);
1128                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1129                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1130                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1131                    ALOGD("Handle:%p", request->input_buffer->buffer);
1132                    //TODO: need to get corresponding metadata and send it to pproc
1133                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1134                         m != mStoredMetadataList.end(); m++) {
1135                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1136                            reproc_meta.meta_buf = m->meta_buf;
1137                            queueMetadata = 1;
1138                            break;
1139                        }
1140                    }
1141                }
1142            }
1143            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1144                            pInputBuffer,(QCamera3Channel*)inputChannel);
1145            if (queueMetadata) {
1146                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1147            }
1148        } else {
1149            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1150                __LINE__, output.buffer, frameNumber);
1151            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1152                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1153                     m != mStoredMetadataList.end(); m++) {
1154                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1155                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1156                            mMetadataChannel->bufDone(m->meta_buf);
1157                            free(m->meta_buf);
1158                            m = mStoredMetadataList.erase(m);
1159                            break;
1160                        }
1161                   }
1162                }
1163            }
1164            rc = channel->request(output.buffer, frameNumber);
1165        }
1166        if (rc < 0)
1167            ALOGE("%s: request failed", __func__);
1168    }
1169
1170    mFirstRequest = false;
1171
1172    //Block on conditional variable
1173    mPendingRequest = 1;
1174    while (mPendingRequest == 1) {
1175        pthread_cond_wait(&mRequestCond, &mMutex);
1176    }
1177
1178    pthread_mutex_unlock(&mMutex);
1179    return rc;
1180}
1181
1182/*===========================================================================
1183 * FUNCTION   : getMetadataVendorTagOps
1184 *
1185 * DESCRIPTION:
1186 *
1187 * PARAMETERS :
1188 *
1189 *
1190 * RETURN     :
1191 *==========================================================================*/
1192void QCamera3HardwareInterface::getMetadataVendorTagOps(
1193                    vendor_tag_query_ops_t* /*ops*/)
1194{
1195    /* Enable locks when we eventually add Vendor Tags */
1196    /*
1197    pthread_mutex_lock(&mMutex);
1198
1199    pthread_mutex_unlock(&mMutex);
1200    */
1201    return;
1202}
1203
1204/*===========================================================================
1205 * FUNCTION   : dump
1206 *
1207 * DESCRIPTION:
1208 *
1209 * PARAMETERS :
1210 *
1211 *
1212 * RETURN     :
1213 *==========================================================================*/
1214void QCamera3HardwareInterface::dump(int /*fd*/)
1215{
1216    /*Enable lock when we implement this function*/
1217    /*
1218    pthread_mutex_lock(&mMutex);
1219
1220    pthread_mutex_unlock(&mMutex);
1221    */
1222    return;
1223}
1224
1225/*===========================================================================
1226 * FUNCTION   : flush
1227 *
1228 * DESCRIPTION:
1229 *
1230 * PARAMETERS :
1231 *
1232 *
1233 * RETURN     :
1234 *==========================================================================*/
1235int QCamera3HardwareInterface::flush()
1236{
1237    /*Enable lock when we implement this function*/
1238    /*
1239    pthread_mutex_lock(&mMutex);
1240
1241    pthread_mutex_unlock(&mMutex);
1242    */
1243    return 0;
1244}
1245
1246/*===========================================================================
1247 * FUNCTION   : captureResultCb
1248 *
1249 * DESCRIPTION: Callback handler for all capture result
1250 *              (streams, as well as metadata)
1251 *
1252 * PARAMETERS :
1253 *   @metadata : metadata information
1254 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1255 *               NULL if metadata.
1256 *
1257 * RETURN     : NONE
1258 *==========================================================================*/
1259void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1260                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1261{
1262    pthread_mutex_lock(&mMutex);
1263
1264    if (metadata_buf) {
1265        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1266        int32_t frame_number_valid = *(int32_t *)
1267            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1268        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1269            CAM_INTF_META_PENDING_REQUESTS, metadata);
1270        uint32_t frame_number = *(uint32_t *)
1271            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1272        const struct timeval *tv = (const struct timeval *)
1273            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1274        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1275            tv->tv_usec * NSEC_PER_USEC;
1276
1277        if (!frame_number_valid) {
1278            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1279            mMetadataChannel->bufDone(metadata_buf);
1280            goto done_metadata;
1281        }
1282        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1283                frame_number, capture_time);
1284
1285        // Go through the pending requests info and send shutter/results to frameworks
1286        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1287                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1288            camera3_capture_result_t result;
1289            camera3_notify_msg_t notify_msg;
1290            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1291
1292            // Flush out all entries with less or equal frame numbers.
1293
1294            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1295            //Right now it's the same as metadata timestamp
1296
1297            //TODO: When there is metadata drop, how do we derive the timestamp of
1298            //dropped frames? For now, we fake the dropped timestamp by substracting
1299            //from the reported timestamp
1300            nsecs_t current_capture_time = capture_time -
1301                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1302
1303            // Send shutter notify to frameworks
1304            notify_msg.type = CAMERA3_MSG_SHUTTER;
1305            notify_msg.message.shutter.frame_number = i->frame_number;
1306            notify_msg.message.shutter.timestamp = current_capture_time;
1307            mCallbackOps->notify(mCallbackOps, &notify_msg);
1308            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1309                    i->frame_number, capture_time);
1310
1311            // Send empty metadata with already filled buffers for dropped metadata
1312            // and send valid metadata with already filled buffers for current metadata
1313            if (i->frame_number < frame_number) {
1314                CameraMetadata dummyMetadata;
1315                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1316                        &current_capture_time, 1);
1317                dummyMetadata.update(ANDROID_REQUEST_ID,
1318                        &(i->request_id), 1);
1319                result.result = dummyMetadata.release();
1320            } else {
1321                result.result = translateCbMetadataToResultMetadata(metadata,
1322                        current_capture_time, i->request_id);
1323                if (mIsZslMode) {
1324                   int found_metadata = 0;
1325                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1326                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1327                        j != i->buffers.end(); j++) {
1328                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1329                         //check if corresp. zsl already exists in the stored metadata list
1330                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1331                               m != mStoredMetadataList.begin(); m++) {
1332                            if (m->frame_number == frame_number) {
1333                               m->meta_buf = metadata_buf;
1334                               found_metadata = 1;
1335                               break;
1336                            }
1337                         }
1338                         if (!found_metadata) {
1339                            MetadataBufferInfo store_meta_info;
1340                            store_meta_info.meta_buf = metadata_buf;
1341                            store_meta_info.frame_number = frame_number;
1342                            mStoredMetadataList.push_back(store_meta_info);
1343                            found_metadata = 1;
1344                         }
1345                      }
1346                   }
1347                   if (!found_metadata) {
1348                       if (!i->input_buffer_present && i->blob_request) {
1349                          //livesnapshot or fallback non-zsl snapshot case
1350                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1351                                j != i->buffers.end(); j++){
1352                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1353                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1354                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1355                                 break;
1356                              }
1357                         }
1358                       } else {
1359                            //return the metadata immediately
1360                            mMetadataChannel->bufDone(metadata_buf);
1361                            free(metadata_buf);
1362                       }
1363                   }
1364               } else if (!mIsZslMode && i->blob_request) {
1365                   //If it is a blob request then send the metadata to the picture channel
1366                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1367               } else {
1368                   // Return metadata buffer
1369                   mMetadataChannel->bufDone(metadata_buf);
1370                   free(metadata_buf);
1371               }
1372
1373            }
1374            if (!result.result) {
1375                ALOGE("%s: metadata is NULL", __func__);
1376            }
1377            result.frame_number = i->frame_number;
1378            result.num_output_buffers = 0;
1379            result.output_buffers = NULL;
1380            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1381                    j != i->buffers.end(); j++) {
1382                if (j->buffer) {
1383                    result.num_output_buffers++;
1384                }
1385            }
1386
1387            if (result.num_output_buffers > 0) {
1388                camera3_stream_buffer_t *result_buffers =
1389                    new camera3_stream_buffer_t[result.num_output_buffers];
1390                if (!result_buffers) {
1391                    ALOGE("%s: Fatal error: out of memory", __func__);
1392                }
1393                size_t result_buffers_idx = 0;
1394                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1395                        j != i->buffers.end(); j++) {
1396                    if (j->buffer) {
1397                        result_buffers[result_buffers_idx++] = *(j->buffer);
1398                        free(j->buffer);
1399                        j->buffer = NULL;
1400                        mPendingBuffersMap.editValueFor(j->stream)--;
1401                    }
1402                }
1403                result.output_buffers = result_buffers;
1404
1405                mCallbackOps->process_capture_result(mCallbackOps, &result);
1406                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1407                        __func__, result.frame_number, current_capture_time);
1408                free_camera_metadata((camera_metadata_t *)result.result);
1409                delete[] result_buffers;
1410            } else {
1411                mCallbackOps->process_capture_result(mCallbackOps, &result);
1412                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1413                        __func__, result.frame_number, current_capture_time);
1414                free_camera_metadata((camera_metadata_t *)result.result);
1415            }
1416            // erase the element from the list
1417            i = mPendingRequestsList.erase(i);
1418        }
1419
1420
1421done_metadata:
1422        bool max_buffers_dequeued = false;
1423        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1424            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1425            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1426            if (queued_buffers == stream->max_buffers) {
1427                max_buffers_dequeued = true;
1428                break;
1429            }
1430        }
1431        if (!max_buffers_dequeued && !pending_requests) {
1432            // Unblock process_capture_request
1433            mPendingRequest = 0;
1434            pthread_cond_signal(&mRequestCond);
1435        }
1436    } else {
1437        // If the frame number doesn't exist in the pending request list,
1438        // directly send the buffer to the frameworks, and update pending buffers map
1439        // Otherwise, book-keep the buffer.
1440        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1441        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1442            i++;
1443        }
1444        if (i == mPendingRequestsList.end()) {
1445            // Verify all pending requests frame_numbers are greater
1446            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1447                    j != mPendingRequestsList.end(); j++) {
1448                if (j->frame_number < frame_number) {
1449                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1450                            __func__, j->frame_number, frame_number);
1451                }
1452            }
1453            camera3_capture_result_t result;
1454            result.result = NULL;
1455            result.frame_number = frame_number;
1456            result.num_output_buffers = 1;
1457            result.output_buffers = buffer;
1458            ALOGV("%s: result frame_number = %d, buffer = %p",
1459                    __func__, frame_number, buffer);
1460            mPendingBuffersMap.editValueFor(buffer->stream)--;
1461            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1462                int found = 0;
1463                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1464                      k != mStoredMetadataList.end(); k++) {
1465                    if (k->frame_number == frame_number) {
1466                        k->zsl_buf_hdl = buffer->buffer;
1467                        found = 1;
1468                        break;
1469                    }
1470                }
1471                if (!found) {
1472                   MetadataBufferInfo meta_info;
1473                   meta_info.frame_number = frame_number;
1474                   meta_info.zsl_buf_hdl = buffer->buffer;
1475                   mStoredMetadataList.push_back(meta_info);
1476                }
1477            }
1478            mCallbackOps->process_capture_result(mCallbackOps, &result);
1479        } else {
1480            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1481                    j != i->buffers.end(); j++) {
1482                if (j->stream == buffer->stream) {
1483                    if (j->buffer != NULL) {
1484                        ALOGE("%s: Error: buffer is already set", __func__);
1485                    } else {
1486                        j->buffer = (camera3_stream_buffer_t *)malloc(
1487                                sizeof(camera3_stream_buffer_t));
1488                        *(j->buffer) = *buffer;
1489                        ALOGV("%s: cache buffer %p at result frame_number %d",
1490                                __func__, buffer, frame_number);
1491                    }
1492                }
1493            }
1494        }
1495    }
1496    pthread_mutex_unlock(&mMutex);
1497    return;
1498}
1499
1500/*===========================================================================
1501 * FUNCTION   : translateCbMetadataToResultMetadata
1502 *
 * DESCRIPTION: Translate a backend metadata_buffer_t into the framework's
 *              camera_metadata_t capture-result format
1504 *
1505 * PARAMETERS :
1506 *   @metadata : metadata information from callback
1507 *
1508 * RETURN     : camera_metadata_t*
1509 *              metadata in a format specified by fwk
1510 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    // Translate a backend metadata buffer into the framework's
    // camera_metadata_t, stamped with the shot timestamp and request id.
    // Ownership of the returned buffer passes to the caller.
    // NOTE(review): every POINTER_OF() entry below is dereferenced
    // unconditionally — this assumes the backend populated all of these
    // tags for every frame; verify against the backend contract.
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    // Flatten per-face data into the parallel arrays the framework expects:
    // 4 ints per rectangle, 6 ints per landmark set. VLAs are sized by the
    // detected face count (all zero-length when no faces were found).
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        // -1 weight: plain rectangle conversion, no weight slot written.
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    // 3A (AE/AF/AWB) modes, states and metering regions.
    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    // Region arrays are packed as [x_min, y_min, x_max, y_max, weight].
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);

    uint8_t *ae_state =
            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);

    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    // Flash status.
    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    // Lens state.
    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    // Crop region is reported as [left, top, width, height].
    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    // Sensor exposure values; exposure time and ISO are also cached in
    // mMetadataResponse (presumably consumed by the JPEG/EXIF path —
    // TODO confirm the consumer).
    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    mMetadataResponse.exposure_time = *sensorExpTime;
    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

    int64_t  *sensorFameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);

    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
    mMetadataResponse.iso_speed = *sensorSensitivity;
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    // Stats modes; the backend face-detect enum is mapped to the framework
    // enum through FACEDETECT_MODES_MAP.
    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
        *faceDetectMode);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    // Lens shading map: 4 floats per grid cell, grid sized per the static
    // capability for this camera.
    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                       (float*)lensShadingMap->lens_shading,
                       4*map_width*map_height);

    // Color correction: current and predicted gains (4 values) plus the
    // 3x3 rational transform matrices.
    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);

    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                       predColorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);

    uint8_t *blackLevelLock = (uint8_t*)
        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);

    uint8_t *sceneFlicker = (uint8_t*)
        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);


    // release() hands the packed buffer (and the duty to free it) to the
    // caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
1758
1759/*===========================================================================
1760 * FUNCTION   : convertToRegions
1761 *
1762 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1763 *
1764 * PARAMETERS :
1765 *   @rect   : cam_rect_t struct to convert
1766 *   @region : int32_t destination array
1767 *   @weight : if we are converting from cam_area_t, weight is valid
1768 *             else weight = -1
1769 *
1770 *==========================================================================*/
1771void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1772    region[0] = rect.left;
1773    region[1] = rect.top;
1774    region[2] = rect.left + rect.width;
1775    region[3] = rect.top + rect.height;
1776    if (weight > -1) {
1777        region[4] = weight;
1778    }
1779}
1780
1781/*===========================================================================
1782 * FUNCTION   : convertFromRegions
1783 *
1784 * DESCRIPTION: helper method to convert from array to cam_rect_t
1785 *
1786 * PARAMETERS :
1787 *   @rect   : cam_rect_t struct to convert
1788 *   @region : int32_t destination array
1789 *   @weight : if we are converting from cam_area_t, weight is valid
1790 *             else weight = -1
1791 *
1792 *==========================================================================*/
1793void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1794                                                   const camera_metadata_t *settings,
1795                                                   uint32_t tag){
1796    CameraMetadata frame_settings;
1797    frame_settings = settings;
1798    int32_t x_min = frame_settings.find(tag).data.i32[0];
1799    int32_t y_min = frame_settings.find(tag).data.i32[1];
1800    int32_t x_max = frame_settings.find(tag).data.i32[2];
1801    int32_t y_max = frame_settings.find(tag).data.i32[3];
1802    roi->weight = frame_settings.find(tag).data.i32[4];
1803    roi->rect.left = x_min;
1804    roi->rect.top = y_min;
1805    roi->rect.width = x_max - x_min;
1806    roi->rect.height = y_max - y_min;
1807}
1808
1809/*===========================================================================
1810 * FUNCTION   : resetIfNeededROI
1811 *
1812 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1813 *              crop region
1814 *
1815 * PARAMETERS :
1816 *   @roi       : cam_area_t struct to resize
1817 *   @scalerCropRegion : cam_crop_region_t region to compare against
1818 *
1819 *
1820 *==========================================================================*/
1821bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1822                                                 const cam_crop_region_t* scalerCropRegion)
1823{
1824    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1825    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1826    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1827    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1828    if ((roi_x_max < scalerCropRegion->left) ||
1829        (roi_y_max < scalerCropRegion->top)  ||
1830        (roi->rect.left > crop_x_max) ||
1831        (roi->rect.top > crop_y_max)){
1832        return false;
1833    }
1834    if (roi->rect.left < scalerCropRegion->left) {
1835        roi->rect.left = scalerCropRegion->left;
1836    }
1837    if (roi->rect.top < scalerCropRegion->top) {
1838        roi->rect.top = scalerCropRegion->top;
1839    }
1840    if (roi_x_max > crop_x_max) {
1841        roi_x_max = crop_x_max;
1842    }
1843    if (roi_y_max > crop_y_max) {
1844        roi_y_max = crop_y_max;
1845    }
1846    roi->rect.width = roi_x_max - roi->rect.left;
1847    roi->rect.height = roi_y_max - roi->rect.top;
1848    return true;
1849}
1850
1851/*===========================================================================
1852 * FUNCTION   : convertLandmarks
1853 *
1854 * DESCRIPTION: helper method to extract the landmarks from face detection info
1855 *
1856 * PARAMETERS :
 *   @face   : cam_face_detection_info_t with eye/mouth center points
1858 *   @landmarks : int32_t destination array
1859 *
1860 *
1861 *==========================================================================*/
1862void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1863{
1864    landmarks[0] = face.left_eye_center.x;
1865    landmarks[1] = face.left_eye_center.y;
1866    landmarks[2] = face.right_eye_center.y;
1867    landmarks[3] = face.right_eye_center.y;
1868    landmarks[4] = face.mouth_center.x;
1869    landmarks[5] = face.mouth_center.y;
1870}
1871
1872#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1873/*===========================================================================
1874 * FUNCTION   : initCapabilities
1875 *
1876 * DESCRIPTION: initialize camera capabilities in static data struct
1877 *
1878 * PARAMETERS :
1879 *   @cameraId  : camera Id
1880 *
1881 * RETURN     : int32_t type of status
1882 *              NO_ERROR  -- success
1883 *              none-zero failure code
1884 *==========================================================================*/
1885int QCamera3HardwareInterface::initCapabilities(int cameraId)
1886{
1887    int rc = 0;
1888    mm_camera_vtbl_t *cameraHandle = NULL;
1889    QCamera3HeapMemory *capabilityHeap = NULL;
1890
1891    cameraHandle = camera_open(cameraId);
1892    if (!cameraHandle) {
1893        ALOGE("%s: camera_open failed", __func__);
1894        rc = -1;
1895        goto open_failed;
1896    }
1897
1898    capabilityHeap = new QCamera3HeapMemory();
1899    if (capabilityHeap == NULL) {
1900        ALOGE("%s: creation of capabilityHeap failed", __func__);
1901        goto heap_creation_failed;
1902    }
1903    /* Allocate memory for capability buffer */
1904    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1905    if(rc != OK) {
1906        ALOGE("%s: No memory for cappability", __func__);
1907        goto allocate_failed;
1908    }
1909
1910    /* Map memory for capability buffer */
1911    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1912    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1913                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1914                                capabilityHeap->getFd(0),
1915                                sizeof(cam_capability_t));
1916    if(rc < 0) {
1917        ALOGE("%s: failed to map capability buffer", __func__);
1918        goto map_failed;
1919    }
1920
1921    /* Query Capability */
1922    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1923    if(rc < 0) {
1924        ALOGE("%s: failed to query capability",__func__);
1925        goto query_failed;
1926    }
1927    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1928    if (!gCamCapability[cameraId]) {
1929        ALOGE("%s: out of memory", __func__);
1930        goto query_failed;
1931    }
1932    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1933                                        sizeof(cam_capability_t));
1934    rc = 0;
1935
1936query_failed:
1937    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1938                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1939map_failed:
1940    capabilityHeap->deallocate();
1941allocate_failed:
1942    delete capabilityHeap;
1943heap_creation_failed:
1944    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1945    cameraHandle = NULL;
1946open_failed:
1947    return rc;
1948}
1949
1950/*===========================================================================
1951 * FUNCTION   : initParameters
1952 *
1953 * DESCRIPTION: initialize camera parameters
1954 *
1955 * PARAMETERS :
1956 *
1957 * RETURN     : int32_t type of status
1958 *              NO_ERROR  -- success
1959 *              none-zero failure code
1960 *==========================================================================*/
1961int QCamera3HardwareInterface::initParameters()
1962{
1963    int rc = 0;
1964
1965    //Allocate Set Param Buffer
1966    mParamHeap = new QCamera3HeapMemory();
1967    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1968    if(rc != OK) {
1969        rc = NO_MEMORY;
1970        ALOGE("Failed to allocate SETPARM Heap memory");
1971        delete mParamHeap;
1972        mParamHeap = NULL;
1973        return rc;
1974    }
1975
1976    //Map memory for parameters buffer
1977    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1978            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1979            mParamHeap->getFd(0),
1980            sizeof(parm_buffer_t));
1981    if(rc < 0) {
1982        ALOGE("%s:failed to map SETPARM buffer",__func__);
1983        rc = FAILED_TRANSACTION;
1984        mParamHeap->deallocate();
1985        delete mParamHeap;
1986        mParamHeap = NULL;
1987        return rc;
1988    }
1989
1990    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1991    return rc;
1992}
1993
1994/*===========================================================================
1995 * FUNCTION   : deinitParameters
1996 *
1997 * DESCRIPTION: de-initialize camera parameters
1998 *
1999 * PARAMETERS :
2000 *
2001 * RETURN     : NONE
2002 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tear down the SETPARM buffer set up by initParameters().
    // Order matters: the buffer must be unmapped from the backend before
    // its backing heap memory is released.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap just freed; clear it so stale use
    // faults instead of corrupting memory.
    mParameters = NULL;
}
2014
2015/*===========================================================================
2016 * FUNCTION   : calcMaxJpegSize
2017 *
2018 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2019 *
2020 * PARAMETERS :
2021 *
2022 * RETURN     : max_jpeg_size
2023 *==========================================================================*/
2024int QCamera3HardwareInterface::calcMaxJpegSize()
2025{
2026    int32_t max_jpeg_size = 0;
2027    int temp_width, temp_height;
2028    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2029        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2030        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2031        if (temp_width * temp_height > max_jpeg_size ) {
2032            max_jpeg_size = temp_width * temp_height;
2033        }
2034    }
2035    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2036    return max_jpeg_size;
2037}
2038
2039/*===========================================================================
2040 * FUNCTION   : initStaticMetadata
2041 *
2042 * DESCRIPTION: initialize the static metadata
2043 *
2044 * PARAMETERS :
2045 *   @cameraId  : camera Id
2046 *
2047 * RETURN     : int32_t type of status
2048 *              0  -- success
2049 *              non-zero failure code
2050 *==========================================================================*/
2051int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2052{
2053    int rc = 0;
2054    CameraMetadata staticInfo;
2055
2056    /* android.info: hardware level */
2057    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2058    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2059        &supportedHardwareLevel, 1);
2060
2061    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2062    /*HAL 3 only*/
2063    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2064                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2065
2066    /*hard coded for now but this should come from sensor*/
2067    float min_focus_distance;
2068    if(facingBack){
2069        min_focus_distance = 10;
2070    } else {
2071        min_focus_distance = 0;
2072    }
2073    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2074                    &min_focus_distance, 1);
2075
2076    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2077                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2078
2079    /*should be using focal lengths but sensor doesn't provide that info now*/
2080    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2081                      &gCamCapability[cameraId]->focal_length,
2082                      1);
2083
2084    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2085                      gCamCapability[cameraId]->apertures,
2086                      gCamCapability[cameraId]->apertures_count);
2087
2088    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2089                gCamCapability[cameraId]->filter_densities,
2090                gCamCapability[cameraId]->filter_densities_count);
2091
2092
2093    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2094                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2095                      gCamCapability[cameraId]->optical_stab_modes_count);
2096
2097    staticInfo.update(ANDROID_LENS_POSITION,
2098                      gCamCapability[cameraId]->lens_position,
2099                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2100
2101    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2102                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2103    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2104                      lens_shading_map_size,
2105                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2106
2107    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2108                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2109    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2110            geo_correction_map_size,
2111            sizeof(geo_correction_map_size)/sizeof(int32_t));
2112
2113    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2114                       gCamCapability[cameraId]->geo_correction_map,
2115                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2116
2117    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2118            gCamCapability[cameraId]->sensor_physical_size, 2);
2119
2120    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2121            gCamCapability[cameraId]->exposure_time_range, 2);
2122
2123    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2124            &gCamCapability[cameraId]->max_frame_duration, 1);
2125
2126
2127    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2128                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2129
2130    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2131                                               gCamCapability[cameraId]->pixel_array_size.height};
2132    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2133                      pixel_array_size, 2);
2134
2135    int32_t active_array_size[] = {0, 0,
2136                                                gCamCapability[cameraId]->active_array_size.width,
2137                                                gCamCapability[cameraId]->active_array_size.height};
2138    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2139                      active_array_size, 4);
2140
2141    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2142            &gCamCapability[cameraId]->white_level, 1);
2143
2144    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2145            gCamCapability[cameraId]->black_level_pattern, 4);
2146
2147    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2148                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2149
2150    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2151                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2152
2153    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2154                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2155
2156    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2157                      &gCamCapability[cameraId]->histogram_size, 1);
2158
2159    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2160            &gCamCapability[cameraId]->max_histogram_count, 1);
2161
2162    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2163                                                gCamCapability[cameraId]->sharpness_map_size.height};
2164
2165    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2166            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2167
2168    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2169            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2170
2171
2172    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2173                      &gCamCapability[cameraId]->raw_min_duration,
2174                       1);
2175
2176    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2177                                                HAL_PIXEL_FORMAT_BLOB};
2178    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2179    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2180                      scalar_formats,
2181                      scalar_formats_count);
2182
2183    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2184    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2185              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2186              available_processed_sizes);
2187    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2188                available_processed_sizes,
2189                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2190
2191    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2192                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2193                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2194
2195    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2196    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2197                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2198                 available_fps_ranges);
2199    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2200            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2201
2202    camera_metadata_rational exposureCompensationStep = {
2203            gCamCapability[cameraId]->exp_compensation_step.numerator,
2204            gCamCapability[cameraId]->exp_compensation_step.denominator};
2205    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2206                      &exposureCompensationStep, 1);
2207
2208    /*TO DO*/
2209    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2210    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2211                      availableVstabModes, sizeof(availableVstabModes));
2212
2213    /*HAL 1 and HAL 3 common*/
2214    float maxZoom = 4;
2215    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2216            &maxZoom, 1);
2217
2218    int32_t max3aRegions = 1;
2219    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2220            &max3aRegions, 1);
2221
2222    uint8_t availableFaceDetectModes[] = {
2223            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2224            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2225    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2226                      availableFaceDetectModes,
2227                      sizeof(availableFaceDetectModes));
2228
2229    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2230                                       gCamCapability[cameraId]->raw_dim.height};
2231    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2232                      raw_size,
2233                      sizeof(raw_size)/sizeof(uint32_t));
2234
2235    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2236                                                        gCamCapability[cameraId]->exposure_compensation_max};
2237    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2238            exposureCompensationRange,
2239            sizeof(exposureCompensationRange)/sizeof(int32_t));
2240
2241    uint8_t lensFacing = (facingBack) ?
2242            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2243    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2244
2245    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2246                available_processed_sizes,
2247                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2248
2249    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2250                      available_thumbnail_sizes,
2251                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2252
2253    int32_t max_jpeg_size = 0;
2254    int temp_width, temp_height;
2255    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2256        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2257        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2258        if (temp_width * temp_height > max_jpeg_size ) {
2259            max_jpeg_size = temp_width * temp_height;
2260        }
2261    }
2262    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2263    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2264                      &max_jpeg_size, 1);
2265
2266    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2267    int32_t size = 0;
2268    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2269        int val = lookupFwkName(EFFECT_MODES_MAP,
2270                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2271                                   gCamCapability[cameraId]->supported_effects[i]);
2272        if (val != NAME_NOT_FOUND) {
2273            avail_effects[size] = (uint8_t)val;
2274            size++;
2275        }
2276    }
2277    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2278                      avail_effects,
2279                      size);
2280
2281    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2282    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2283    int32_t supported_scene_modes_cnt = 0;
2284    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2285        int val = lookupFwkName(SCENE_MODES_MAP,
2286                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2287                                gCamCapability[cameraId]->supported_scene_modes[i]);
2288        if (val != NAME_NOT_FOUND) {
2289            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2290            supported_indexes[supported_scene_modes_cnt] = i;
2291            supported_scene_modes_cnt++;
2292        }
2293    }
2294
2295    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2296                      avail_scene_modes,
2297                      supported_scene_modes_cnt);
2298
2299    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2300    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2301                      supported_scene_modes_cnt,
2302                      scene_mode_overrides,
2303                      supported_indexes,
2304                      cameraId);
2305    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2306                      scene_mode_overrides,
2307                      supported_scene_modes_cnt*3);
2308
2309    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2310    size = 0;
2311    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2312        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2313                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2314                                 gCamCapability[cameraId]->supported_antibandings[i]);
2315        if (val != NAME_NOT_FOUND) {
2316            avail_antibanding_modes[size] = (uint8_t)val;
2317            size++;
2318        }
2319
2320    }
2321    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2322                      avail_antibanding_modes,
2323                      size);
2324
2325    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2326    size = 0;
2327    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2328        int val = lookupFwkName(FOCUS_MODES_MAP,
2329                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2330                                gCamCapability[cameraId]->supported_focus_modes[i]);
2331        if (val != NAME_NOT_FOUND) {
2332            avail_af_modes[size] = (uint8_t)val;
2333            size++;
2334        }
2335    }
2336    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2337                      avail_af_modes,
2338                      size);
2339
2340    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2341    size = 0;
2342    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2343        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2344                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2345                                    gCamCapability[cameraId]->supported_white_balances[i]);
2346        if (val != NAME_NOT_FOUND) {
2347            avail_awb_modes[size] = (uint8_t)val;
2348            size++;
2349        }
2350    }
2351    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2352                      avail_awb_modes,
2353                      size);
2354
2355    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2356    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2357      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2358
2359    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2360            available_flash_levels,
2361            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2362
2363
2364    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2365    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2366            &flashAvailable, 1);
2367
2368    uint8_t avail_ae_modes[5];
2369    size = 0;
2370    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2371        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2372        size++;
2373    }
2374    if (flashAvailable) {
2375        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2376        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2377        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2378    }
2379    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2380                      avail_ae_modes,
2381                      size);
2382
2383    int32_t sensitivity_range[2];
2384    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2385    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2386    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2387                      sensitivity_range,
2388                      sizeof(sensitivity_range) / sizeof(int32_t));
2389
2390    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2391                      &gCamCapability[cameraId]->max_analog_sensitivity,
2392                      1);
2393
2394    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2395                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2396                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2397
2398    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2399    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2400                      &sensor_orientation,
2401                      1);
2402
2403    int32_t max_output_streams[3] = {1, 3, 1};
2404    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2405                      max_output_streams,
2406                      3);
2407
2408    gStaticMetadata[cameraId] = staticInfo.release();
2409    return rc;
2410}
2411
2412/*===========================================================================
2413 * FUNCTION   : makeTable
2414 *
2415 * DESCRIPTION: make a table of sizes
2416 *
2417 * PARAMETERS :
2418 *
2419 *
2420 *==========================================================================*/
2421void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2422                                          int32_t* sizeTable)
2423{
2424    int j = 0;
2425    for (int i = 0; i < size; i++) {
2426        sizeTable[j] = dimTable[i].width;
2427        sizeTable[j+1] = dimTable[i].height;
2428        j+=2;
2429    }
2430}
2431
2432/*===========================================================================
2433 * FUNCTION   : makeFPSTable
2434 *
2435 * DESCRIPTION: make a table of fps ranges
2436 *
2437 * PARAMETERS :
2438 *
2439 *==========================================================================*/
2440void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2441                                          int32_t* fpsRangesTable)
2442{
2443    int j = 0;
2444    for (int i = 0; i < size; i++) {
2445        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2446        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2447        j+=2;
2448    }
2449}
2450
2451/*===========================================================================
2452 * FUNCTION   : makeOverridesList
2453 *
2454 * DESCRIPTION: make a list of scene mode overrides
2455 *
2456 * PARAMETERS :
2457 *
2458 *
2459 *==========================================================================*/
2460void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2461                                                  uint8_t size, uint8_t* overridesList,
2462                                                  uint8_t* supported_indexes,
2463                                                  int camera_id)
2464{
2465    /*daemon will give a list of overrides for all scene modes.
2466      However we should send the fwk only the overrides for the scene modes
2467      supported by the framework*/
2468    int j = 0, index = 0, supt = 0;
2469    uint8_t focus_override;
2470    for (int i = 0; i < size; i++) {
2471        supt = 0;
2472        index = supported_indexes[i];
2473        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2474        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2475                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2476                                                    overridesTable[index].awb_mode);
2477        focus_override = (uint8_t)overridesTable[index].af_mode;
2478        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2479           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2480              supt = 1;
2481              break;
2482           }
2483        }
2484        if (supt) {
2485           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2486                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2487                                              focus_override);
2488        } else {
2489           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2490        }
2491        j+=3;
2492    }
2493}
2494
2495/*===========================================================================
2496 * FUNCTION   : getPreviewHalPixelFormat
2497 *
2498 * DESCRIPTION: convert the format to type recognized by framework
2499 *
2500 * PARAMETERS : format : the format from backend
2501 *
2502 ** RETURN    : format recognized by framework
2503 *
2504 *==========================================================================*/
2505int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2506{
2507    int32_t halPixelFormat;
2508
2509    switch (format) {
2510    case CAM_FORMAT_YUV_420_NV12:
2511        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2512        break;
2513    case CAM_FORMAT_YUV_420_NV21:
2514        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2515        break;
2516    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2517        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2518        break;
2519    case CAM_FORMAT_YUV_420_YV12:
2520        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2521        break;
2522    case CAM_FORMAT_YUV_422_NV16:
2523    case CAM_FORMAT_YUV_422_NV61:
2524    default:
2525        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2526        break;
2527    }
2528    return halPixelFormat;
2529}
2530
2531/*===========================================================================
2532 * FUNCTION   : getSensorSensitivity
2533 *
2534 * DESCRIPTION: convert iso_mode to an integer value
2535 *
2536 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2537 *
2538 ** RETURN    : sensitivity supported by sensor
2539 *
2540 *==========================================================================*/
2541int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2542{
2543    int32_t sensitivity;
2544
2545    switch (iso_mode) {
2546    case CAM_ISO_MODE_100:
2547        sensitivity = 100;
2548        break;
2549    case CAM_ISO_MODE_200:
2550        sensitivity = 200;
2551        break;
2552    case CAM_ISO_MODE_400:
2553        sensitivity = 400;
2554        break;
2555    case CAM_ISO_MODE_800:
2556        sensitivity = 800;
2557        break;
2558    case CAM_ISO_MODE_1600:
2559        sensitivity = 1600;
2560        break;
2561    default:
2562        sensitivity = -1;
2563        break;
2564    }
2565    return sensitivity;
2566}
2567
2568
2569/*===========================================================================
2570 * FUNCTION   : AddSetParmEntryToBatch
2571 *
2572 * DESCRIPTION: add set parameter entry into batch
2573 *
2574 * PARAMETERS :
2575 *   @p_table     : ptr to parameter buffer
2576 *   @paramType   : parameter type
2577 *   @paramLength : length of parameter value
2578 *   @paramValue  : ptr to parameter value
2579 *
2580 * RETURN     : int32_t type of status
2581 *              NO_ERROR  -- success
2582 *              none-zero failure code
2583 *==========================================================================*/
2584int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2585                                                          cam_intf_parm_type_t paramType,
2586                                                          uint32_t paramLength,
2587                                                          void *paramValue)
2588{
2589    int position = paramType;
2590    int current, next;
2591
2592    /*************************************************************************
2593    *                 Code to take care of linking next flags                *
2594    *************************************************************************/
2595    current = GET_FIRST_PARAM_ID(p_table);
2596    if (position == current){
2597        //DO NOTHING
2598    } else if (position < current){
2599        SET_NEXT_PARAM_ID(position, p_table, current);
2600        SET_FIRST_PARAM_ID(p_table, position);
2601    } else {
2602        /* Search for the position in the linked list where we need to slot in*/
2603        while (position > GET_NEXT_PARAM_ID(current, p_table))
2604            current = GET_NEXT_PARAM_ID(current, p_table);
2605
2606        /*If node already exists no need to alter linking*/
2607        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2608            next = GET_NEXT_PARAM_ID(current, p_table);
2609            SET_NEXT_PARAM_ID(current, p_table, position);
2610            SET_NEXT_PARAM_ID(position, p_table, next);
2611        }
2612    }
2613
2614    /*************************************************************************
2615    *                   Copy contents into entry                             *
2616    *************************************************************************/
2617
2618    if (paramLength > sizeof(parm_type_t)) {
2619        ALOGE("%s:Size of input larger than max entry size",__func__);
2620        return BAD_VALUE;
2621    }
2622    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2623    return NO_ERROR;
2624}
2625
2626/*===========================================================================
2627 * FUNCTION   : lookupFwkName
2628 *
2629 * DESCRIPTION: In case the enum is not same in fwk and backend
2630 *              make sure the parameter is correctly propogated
2631 *
2632 * PARAMETERS  :
2633 *   @arr      : map between the two enums
2634 *   @len      : len of the map
2635 *   @hal_name : name of the hal_parm to map
2636 *
2637 * RETURN     : int type of status
2638 *              fwk_name  -- success
2639 *              none-zero failure code
2640 *==========================================================================*/
2641int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2642                                             int len, int hal_name)
2643{
2644
2645    for (int i = 0; i < len; i++) {
2646        if (arr[i].hal_name == hal_name)
2647            return arr[i].fwk_name;
2648    }
2649
2650    /* Not able to find matching framework type is not necessarily
2651     * an error case. This happens when mm-camera supports more attributes
2652     * than the frameworks do */
2653    ALOGD("%s: Cannot find matching framework type", __func__);
2654    return NAME_NOT_FOUND;
2655}
2656
2657/*===========================================================================
2658 * FUNCTION   : lookupHalName
2659 *
2660 * DESCRIPTION: In case the enum is not same in fwk and backend
2661 *              make sure the parameter is correctly propogated
2662 *
2663 * PARAMETERS  :
2664 *   @arr      : map between the two enums
2665 *   @len      : len of the map
2666 *   @fwk_name : name of the hal_parm to map
2667 *
2668 * RETURN     : int32_t type of status
2669 *              hal_name  -- success
2670 *              none-zero failure code
2671 *==========================================================================*/
2672int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2673                                             int len, int fwk_name)
2674{
2675    for (int i = 0; i < len; i++) {
2676       if (arr[i].fwk_name == fwk_name)
2677           return arr[i].hal_name;
2678    }
2679    ALOGE("%s: Cannot find matching hal type", __func__);
2680    return NAME_NOT_FOUND;
2681}
2682
2683/*===========================================================================
2684 * FUNCTION   : getCapabilities
2685 *
2686 * DESCRIPTION: query camera capabilities
2687 *
2688 * PARAMETERS :
2689 *   @cameraId  : camera Id
2690 *   @info      : camera info struct to be filled in with camera capabilities
2691 *
2692 * RETURN     : int32_t type of status
2693 *              NO_ERROR  -- success
2694 *              none-zero failure code
2695 *==========================================================================*/
2696int QCamera3HardwareInterface::getCamInfo(int cameraId,
2697                                    struct camera_info *info)
2698{
2699    int rc = 0;
2700
2701    if (NULL == gCamCapability[cameraId]) {
2702        rc = initCapabilities(cameraId);
2703        if (rc < 0) {
2704            //pthread_mutex_unlock(&g_camlock);
2705            return rc;
2706        }
2707    }
2708
2709    if (NULL == gStaticMetadata[cameraId]) {
2710        rc = initStaticMetadata(cameraId);
2711        if (rc < 0) {
2712            return rc;
2713        }
2714    }
2715
2716    switch(gCamCapability[cameraId]->position) {
2717    case CAM_POSITION_BACK:
2718        info->facing = CAMERA_FACING_BACK;
2719        break;
2720
2721    case CAM_POSITION_FRONT:
2722        info->facing = CAMERA_FACING_FRONT;
2723        break;
2724
2725    default:
2726        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2727        rc = -1;
2728        break;
2729    }
2730
2731
2732    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2733    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2734    info->static_camera_characteristics = gStaticMetadata[cameraId];
2735
2736    return rc;
2737}
2738
2739/*===========================================================================
2740 * FUNCTION   : translateMetadata
2741 *
2742 * DESCRIPTION: translate the metadata into camera_metadata_t
2743 *
2744 * PARAMETERS : type of the request
2745 *
2746 *
2747 * RETURN     : success: camera_metadata_t*
2748 *              failure: NULL
2749 *
2750 *==========================================================================*/
2751camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2752{
2753    pthread_mutex_lock(&mMutex);
2754
2755    if (mDefaultMetadata[type] != NULL) {
2756        pthread_mutex_unlock(&mMutex);
2757        return mDefaultMetadata[type];
2758    }
2759    //first time we are handling this request
2760    //fill up the metadata structure using the wrapper class
2761    CameraMetadata settings;
2762    //translate from cam_capability_t to camera_metadata_tag_t
2763    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2764    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2765
2766    /*control*/
2767
2768    uint8_t controlIntent = 0;
2769    switch (type) {
2770      case CAMERA3_TEMPLATE_PREVIEW:
2771        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2772        break;
2773      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2774        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2775        break;
2776      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2777        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2778        break;
2779      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2780        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2781        break;
2782      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2783        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2784        break;
2785      default:
2786        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2787        break;
2788    }
2789    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2790
2791    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2792            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2793
2794    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2795    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2796
2797    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2798    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2799
2800    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2801    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2802
2803    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2804    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2805
2806    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2807    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2808
2809    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2810    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2811
2812    static uint8_t focusMode;
2813    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2814        ALOGE("%s: Setting focus mode to auto", __func__);
2815        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2816    } else {
2817        ALOGE("%s: Setting focus mode to off", __func__);
2818        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2819    }
2820    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2821
2822    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2823    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2824
2825    /*flash*/
2826    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2827    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2828
2829    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2830    settings.update(ANDROID_FLASH_FIRING_POWER,
2831            &flashFiringLevel, 1);
2832
2833    /* lens */
2834    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2835    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2836
2837    if (gCamCapability[mCameraId]->filter_densities_count) {
2838        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2839        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2840                        gCamCapability[mCameraId]->filter_densities_count);
2841    }
2842
2843    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2844    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2845
2846    /* frame duration */
2847    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2848    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2849
2850    /* sensitivity */
2851    static const int32_t default_sensitivity = 100;
2852    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2853
2854    /*edge mode*/
2855    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2856    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
2857
2858    /*noise reduction mode*/
2859    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
2860    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
2861
2862    /*color correction mode*/
2863    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
2864    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
2865
2866    /*transform matrix mode*/
2867    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
2868    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
2869
2870    mDefaultMetadata[type] = settings.release();
2871
2872    pthread_mutex_unlock(&mMutex);
2873    return mDefaultMetadata[type];
2874}
2875
2876/*===========================================================================
2877 * FUNCTION   : setFrameParameters
2878 *
2879 * DESCRIPTION: set parameters per frame as requested in the metadata from
2880 *              framework
2881 *
2882 * PARAMETERS :
2883 *   @request   : request that needs to be serviced
2884 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2885 *
2886 * RETURN     : success: NO_ERROR
2887 *              failure:
2888 *==========================================================================*/
2889int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2890                    uint32_t streamTypeMask)
2891{
2892    /*translate from camera_metadata_t type to parm_type_t*/
2893    int rc = 0;
2894    if (request->settings == NULL && mFirstRequest) {
2895        /*settings cannot be null for the first request*/
2896        return BAD_VALUE;
2897    }
2898
2899    int32_t hal_version = CAM_HAL_V3;
2900
2901    memset(mParameters, 0, sizeof(parm_buffer_t));
2902    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2903    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2904                sizeof(hal_version), &hal_version);
2905    if (rc < 0) {
2906        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2907        return BAD_VALUE;
2908    }
2909
2910    /*we need to update the frame number in the parameters*/
2911    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2912                                sizeof(request->frame_number), &(request->frame_number));
2913    if (rc < 0) {
2914        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2915        return BAD_VALUE;
2916    }
2917
2918    /* Update stream id mask where buffers are requested */
2919    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2920                                sizeof(streamTypeMask), &streamTypeMask);
2921    if (rc < 0) {
2922        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2923        return BAD_VALUE;
2924    }
2925
2926    if(request->settings != NULL){
2927        rc = translateMetadataToParameters(request);
2928    }
2929    /*set the parameters to backend*/
2930    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2931    return rc;
2932}
2933
2934/*===========================================================================
2935 * FUNCTION   : translateMetadataToParameters
2936 *
2937 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2938 *
2939 *
2940 * PARAMETERS :
2941 *   @request  : request sent from framework
2942 *
2943 *
2944 * RETURN     : success: NO_ERROR
2945 *              failure:
2946 *==========================================================================*/
2947int QCamera3HardwareInterface::translateMetadataToParameters
2948                                  (const camera3_capture_request_t *request)
2949{
2950    int rc = 0;
2951    CameraMetadata frame_settings;
2952    frame_settings = request->settings;
2953
2954    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2955        int32_t antibandingMode =
2956            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2957        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2958                sizeof(antibandingMode), &antibandingMode);
2959    }
2960
2961    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2962        int32_t expCompensation = frame_settings.find(
2963            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2964        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2965            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2966        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2967            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2968        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2969          sizeof(expCompensation), &expCompensation);
2970    }
2971
2972    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2973        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2974        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2975                sizeof(aeLock), &aeLock);
2976    }
2977    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2978        cam_fps_range_t fps_range;
2979        fps_range.min_fps =
2980            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2981        fps_range.max_fps =
2982            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2983        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2984                sizeof(fps_range), &fps_range);
2985    }
2986
2987    float focalDistance = -1.0;
2988    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2989        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2990        rc = AddSetParmEntryToBatch(mParameters,
2991                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2992                sizeof(focalDistance), &focalDistance);
2993    }
2994
2995    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2996        uint8_t fwk_focusMode =
2997            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2998        uint8_t focusMode;
2999        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3000            focusMode = CAM_FOCUS_MODE_INFINITY;
3001        } else{
3002         focusMode = lookupHalName(FOCUS_MODES_MAP,
3003                                   sizeof(FOCUS_MODES_MAP),
3004                                   fwk_focusMode);
3005        }
3006        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3007                sizeof(focusMode), &focusMode);
3008    }
3009
3010    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3011        uint8_t awbLock =
3012            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3013        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3014                sizeof(awbLock), &awbLock);
3015    }
3016
3017    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3018        uint8_t fwk_whiteLevel =
3019            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3020        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3021                sizeof(WHITE_BALANCE_MODES_MAP),
3022                fwk_whiteLevel);
3023        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3024                sizeof(whiteLevel), &whiteLevel);
3025    }
3026
3027    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3028        uint8_t fwk_effectMode =
3029            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3030        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3031                sizeof(EFFECT_MODES_MAP),
3032                fwk_effectMode);
3033        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3034                sizeof(effectMode), &effectMode);
3035    }
3036
3037    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3038        uint8_t fwk_aeMode =
3039            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3040        uint8_t aeMode;
3041        int32_t redeye;
3042
3043        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3044            aeMode = CAM_AE_MODE_OFF;
3045        } else {
3046            aeMode = CAM_AE_MODE_ON;
3047        }
3048        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3049            redeye = 1;
3050        } else {
3051            redeye = 0;
3052        }
3053
3054        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3055                                          sizeof(AE_FLASH_MODE_MAP),
3056                                          fwk_aeMode);
3057        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3058                sizeof(aeMode), &aeMode);
3059        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3060                sizeof(flashMode), &flashMode);
3061        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3062                sizeof(redeye), &redeye);
3063    }
3064
3065    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3066        uint8_t colorCorrectMode =
3067            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3068        rc =
3069            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3070                    sizeof(colorCorrectMode), &colorCorrectMode);
3071    }
3072
3073    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3074        cam_color_correct_gains_t colorCorrectGains;
3075        for (int i = 0; i < 4; i++) {
3076            colorCorrectGains.gains[i] =
3077                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3078        }
3079        rc =
3080            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3081                    sizeof(colorCorrectGains), &colorCorrectGains);
3082    }
3083
3084    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3085        cam_color_correct_matrix_t colorCorrectTransform;
3086        cam_rational_type_t transform_elem;
3087        int num = 0;
3088        for (int i = 0; i < 3; i++) {
3089           for (int j = 0; j < 3; j++) {
3090              transform_elem.numerator =
3091                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3092              transform_elem.denominator =
3093                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3094              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3095              num++;
3096           }
3097        }
3098        rc =
3099            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3100                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3101    }
3102
3103    cam_trigger_t aecTrigger;
3104    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3105    aecTrigger.trigger_id = -1;
3106    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3107        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3108        aecTrigger.trigger =
3109            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3110        aecTrigger.trigger_id =
3111            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3112    }
3113    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3114                                sizeof(aecTrigger), &aecTrigger);
3115
3116    /*af_trigger must come with a trigger id*/
3117    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3118        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3119        cam_trigger_t af_trigger;
3120        af_trigger.trigger =
3121            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3122        af_trigger.trigger_id =
3123            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3124        rc = AddSetParmEntryToBatch(mParameters,
3125                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3126    }
3127
3128    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3129        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3130        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3131                sizeof(metaMode), &metaMode);
3132        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3133           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3134           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3135                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3136                                             fwk_sceneMode);
3137           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3138                sizeof(sceneMode), &sceneMode);
3139        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3140           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3141           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3142                sizeof(sceneMode), &sceneMode);
3143        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3144           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3145           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3146                sizeof(sceneMode), &sceneMode);
3147        }
3148    }
3149
3150    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3151        int32_t demosaic =
3152            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3153        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3154                sizeof(demosaic), &demosaic);
3155    }
3156
3157    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3158        cam_edge_application_t edge_application;
3159        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3160        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3161            edge_application.sharpness = 0;
3162        } else {
3163            edge_application.sharpness = 10;
3164        }
3165        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3166                sizeof(edge_application), &edge_application);
3167    }
3168
3169    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3170        int32_t edgeStrength =
3171            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3172        rc = AddSetParmEntryToBatch(mParameters,
3173                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
3174    }
3175
3176    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3177        int32_t respectFlashMode = 1;
3178        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3179            uint8_t fwk_aeMode =
3180                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3181            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3182                respectFlashMode = 0;
3183                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3184                    __func__);
3185            }
3186        }
3187        if (respectFlashMode) {
3188            uint8_t flashMode =
3189                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3190            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3191                                          sizeof(FLASH_MODES_MAP),
3192                                          flashMode);
3193            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3194            // To check: CAM_INTF_META_FLASH_MODE usage
3195            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3196                          sizeof(flashMode), &flashMode);
3197        }
3198    }
3199
3200    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3201        uint8_t flashPower =
3202            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3203        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3204                sizeof(flashPower), &flashPower);
3205    }
3206
3207    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3208        int64_t flashFiringTime =
3209            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3210        rc = AddSetParmEntryToBatch(mParameters,
3211                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3212    }
3213
3214    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3215        uint8_t geometricMode =
3216            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3217        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3218                sizeof(geometricMode), &geometricMode);
3219    }
3220
3221    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3222        uint8_t geometricStrength =
3223            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3224        rc = AddSetParmEntryToBatch(mParameters,
3225                CAM_INTF_META_GEOMETRIC_STRENGTH,
3226                sizeof(geometricStrength), &geometricStrength);
3227    }
3228
3229    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3230        uint8_t hotPixelMode =
3231            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3232        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3233                sizeof(hotPixelMode), &hotPixelMode);
3234    }
3235
3236    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3237        float lensAperture =
3238            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3239        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3240                sizeof(lensAperture), &lensAperture);
3241    }
3242
3243    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3244        float filterDensity =
3245            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3246        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3247                sizeof(filterDensity), &filterDensity);
3248    }
3249
3250    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3251        float focalLength =
3252            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3253        rc = AddSetParmEntryToBatch(mParameters,
3254                CAM_INTF_META_LENS_FOCAL_LENGTH,
3255                sizeof(focalLength), &focalLength);
3256    }
3257
3258    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3259        uint8_t optStabMode =
3260            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3261        rc = AddSetParmEntryToBatch(mParameters,
3262                CAM_INTF_META_LENS_OPT_STAB_MODE,
3263                sizeof(optStabMode), &optStabMode);
3264    }
3265
3266    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3267        uint8_t noiseRedMode =
3268            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3269        rc = AddSetParmEntryToBatch(mParameters,
3270                CAM_INTF_META_NOISE_REDUCTION_MODE,
3271                sizeof(noiseRedMode), &noiseRedMode);
3272    }
3273
3274    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3275        uint8_t noiseRedStrength =
3276            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3277        rc = AddSetParmEntryToBatch(mParameters,
3278                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3279                sizeof(noiseRedStrength), &noiseRedStrength);
3280    }
3281
3282    cam_crop_region_t scalerCropRegion;
3283    bool scalerCropSet = false;
3284    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3285        scalerCropRegion.left =
3286            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3287        scalerCropRegion.top =
3288            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3289        scalerCropRegion.width =
3290            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3291        scalerCropRegion.height =
3292            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3293        rc = AddSetParmEntryToBatch(mParameters,
3294                CAM_INTF_META_SCALER_CROP_REGION,
3295                sizeof(scalerCropRegion), &scalerCropRegion);
3296        scalerCropSet = true;
3297    }
3298
3299    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3300        int64_t sensorExpTime =
3301            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3302        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3303        rc = AddSetParmEntryToBatch(mParameters,
3304                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3305                sizeof(sensorExpTime), &sensorExpTime);
3306    }
3307
3308    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3309        int64_t sensorFrameDuration =
3310            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3311        int64_t minFrameDuration = getMinFrameDuration(request);
3312        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3313        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3314            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3315        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3316        rc = AddSetParmEntryToBatch(mParameters,
3317                CAM_INTF_META_SENSOR_FRAME_DURATION,
3318                sizeof(sensorFrameDuration), &sensorFrameDuration);
3319    }
3320
3321    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3322        int32_t sensorSensitivity =
3323            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3324        if (sensorSensitivity <
3325                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3326            sensorSensitivity =
3327                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3328        if (sensorSensitivity >
3329                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3330            sensorSensitivity =
3331                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3332        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3333        rc = AddSetParmEntryToBatch(mParameters,
3334                CAM_INTF_META_SENSOR_SENSITIVITY,
3335                sizeof(sensorSensitivity), &sensorSensitivity);
3336    }
3337
3338    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3339        int32_t shadingMode =
3340            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3341        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3342                sizeof(shadingMode), &shadingMode);
3343    }
3344
3345    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3346        uint8_t shadingStrength =
3347            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3348        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3349                sizeof(shadingStrength), &shadingStrength);
3350    }
3351
3352    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3353        uint8_t fwk_facedetectMode =
3354            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3355        uint8_t facedetectMode =
3356            lookupHalName(FACEDETECT_MODES_MAP,
3357                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3358        rc = AddSetParmEntryToBatch(mParameters,
3359                CAM_INTF_META_STATS_FACEDETECT_MODE,
3360                sizeof(facedetectMode), &facedetectMode);
3361    }
3362
3363    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3364        uint8_t histogramMode =
3365            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3366        rc = AddSetParmEntryToBatch(mParameters,
3367                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3368                sizeof(histogramMode), &histogramMode);
3369    }
3370
3371    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3372        uint8_t sharpnessMapMode =
3373            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3374        rc = AddSetParmEntryToBatch(mParameters,
3375                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3376                sizeof(sharpnessMapMode), &sharpnessMapMode);
3377    }
3378
3379    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3380        uint8_t tonemapMode =
3381            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3382        rc = AddSetParmEntryToBatch(mParameters,
3383                CAM_INTF_META_TONEMAP_MODE,
3384                sizeof(tonemapMode), &tonemapMode);
3385    }
3386    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3387    /*All tonemap channels will have the same number of points*/
3388    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3389        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3390        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3391        cam_rgb_tonemap_curves tonemapCurves;
3392        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3393
3394        /* ch0 = G*/
3395        int point = 0;
3396        cam_tonemap_curve_t tonemapCurveGreen;
3397        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3398            for (int j = 0; j < 2; j++) {
3399               tonemapCurveGreen.tonemap_points[i][j] =
3400                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3401               point++;
3402            }
3403        }
3404        tonemapCurves.curves[0] = tonemapCurveGreen;
3405
3406        /* ch 1 = B */
3407        point = 0;
3408        cam_tonemap_curve_t tonemapCurveBlue;
3409        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3410            for (int j = 0; j < 2; j++) {
3411               tonemapCurveBlue.tonemap_points[i][j] =
3412                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3413               point++;
3414            }
3415        }
3416        tonemapCurves.curves[1] = tonemapCurveBlue;
3417
3418        /* ch 2 = R */
3419        point = 0;
3420        cam_tonemap_curve_t tonemapCurveRed;
3421        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3422            for (int j = 0; j < 2; j++) {
3423               tonemapCurveRed.tonemap_points[i][j] =
3424                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3425               point++;
3426            }
3427        }
3428        tonemapCurves.curves[2] = tonemapCurveRed;
3429
3430        rc = AddSetParmEntryToBatch(mParameters,
3431                CAM_INTF_META_TONEMAP_CURVES,
3432                sizeof(tonemapCurves), &tonemapCurves);
3433    }
3434
3435    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3436        uint8_t captureIntent =
3437            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3438        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3439                sizeof(captureIntent), &captureIntent);
3440    }
3441
3442    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3443        uint8_t blackLevelLock =
3444            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3445        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3446                sizeof(blackLevelLock), &blackLevelLock);
3447    }
3448
3449    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3450        uint8_t lensShadingMapMode =
3451            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3452        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3453                sizeof(lensShadingMapMode), &lensShadingMapMode);
3454    }
3455
3456    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3457        cam_area_t roi;
3458        bool reset = true;
3459        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3460        if (scalerCropSet) {
3461            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3462        }
3463        if (reset) {
3464            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3465                    sizeof(roi), &roi);
3466        }
3467    }
3468
3469    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3470        cam_area_t roi;
3471        bool reset = true;
3472        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3473        if (scalerCropSet) {
3474            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3475        }
3476        if (reset) {
3477            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3478                    sizeof(roi), &roi);
3479        }
3480    }
3481
3482    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3483        cam_area_t roi;
3484        bool reset = true;
3485        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3486        if (scalerCropSet) {
3487            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3488        }
3489        if (reset) {
3490            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3491                    sizeof(roi), &roi);
3492        }
3493    }
3494    return rc;
3495}
3496
3497/*===========================================================================
3498 * FUNCTION   : getJpegSettings
3499 *
3500 * DESCRIPTION: save the jpeg settings in the HAL
3501 *
3502 *
3503 * PARAMETERS :
3504 *   @settings  : frame settings information from framework
3505 *
3506 *
3507 * RETURN     : success: NO_ERROR
 *              failure: (currently always returns 0; see body)
3509 *==========================================================================*/
3510int QCamera3HardwareInterface::getJpegSettings
3511                                  (const camera_metadata_t *settings)
3512{
3513    if (mJpegSettings) {
3514        if (mJpegSettings->gps_timestamp) {
3515            free(mJpegSettings->gps_timestamp);
3516            mJpegSettings->gps_timestamp = NULL;
3517        }
3518        if (mJpegSettings->gps_coordinates) {
3519            for (int i = 0; i < 3; i++) {
3520                free(mJpegSettings->gps_coordinates[i]);
3521                mJpegSettings->gps_coordinates[i] = NULL;
3522            }
3523        }
3524        free(mJpegSettings);
3525        mJpegSettings = NULL;
3526    }
3527    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3528    CameraMetadata jpeg_settings;
3529    jpeg_settings = settings;
3530
3531    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3532        mJpegSettings->jpeg_orientation =
3533            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3534    } else {
3535        mJpegSettings->jpeg_orientation = 0;
3536    }
3537    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3538        mJpegSettings->jpeg_quality =
3539            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3540    } else {
3541        mJpegSettings->jpeg_quality = 85;
3542    }
3543    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3544        mJpegSettings->thumbnail_size.width =
3545            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3546        mJpegSettings->thumbnail_size.height =
3547            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3548    } else {
3549        mJpegSettings->thumbnail_size.width = 0;
3550        mJpegSettings->thumbnail_size.height = 0;
3551    }
3552    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3553        for (int i = 0; i < 3; i++) {
3554            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3555            *(mJpegSettings->gps_coordinates[i]) =
3556                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3557        }
3558    } else{
3559       for (int i = 0; i < 3; i++) {
3560            mJpegSettings->gps_coordinates[i] = NULL;
3561        }
3562    }
3563
3564    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3565        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3566        *(mJpegSettings->gps_timestamp) =
3567            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3568    } else {
3569        mJpegSettings->gps_timestamp = NULL;
3570    }
3571
3572    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3573        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3574        for (int i = 0; i < len; i++) {
3575            mJpegSettings->gps_processing_method[i] =
3576                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3577        }
3578        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3579            mJpegSettings->gps_processing_method[len] = '\0';
3580        }
3581    } else {
3582        mJpegSettings->gps_processing_method[0] = '\0';
3583    }
3584
3585    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3586        mJpegSettings->sensor_sensitivity =
3587            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3588    } else {
3589        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3590    }
3591
3592    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3593
3594    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3595        mJpegSettings->lens_focal_length =
3596            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3597    }
3598    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3599        mJpegSettings->exposure_compensation =
3600            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3601    }
3602    mJpegSettings->sharpness = 10; //default value
3603    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3604        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3605        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3606            mJpegSettings->sharpness = 0;
3607        }
3608    }
3609    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3610    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3611    mJpegSettings->is_jpeg_format = true;
3612    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3613    return 0;
3614}
3615
3616/*===========================================================================
3617 * FUNCTION   : captureResultCb
3618 *
3619 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3620 *
3621 * PARAMETERS :
3622 *   @frame  : frame information from mm-camera-interface
3623 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3624 *   @userdata: userdata
3625 *
3626 * RETURN     : NONE
3627 *==========================================================================*/
3628void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3629                camera3_stream_buffer_t *buffer,
3630                uint32_t frame_number, void *userdata)
3631{
3632    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3633    if (hw == NULL) {
3634        ALOGE("%s: Invalid hw %p", __func__, hw);
3635        return;
3636    }
3637
3638    hw->captureResultCb(metadata, buffer, frame_number);
3639    return;
3640}
3641
3642
3643/*===========================================================================
3644 * FUNCTION   : initialize
3645 *
3646 * DESCRIPTION: Pass framework callback pointers to HAL
3647 *
3648 * PARAMETERS :
3649 *
3650 *
3651 * RETURN     : Success : 0
3652 *              Failure: -ENODEV
3653 *==========================================================================*/
3654
3655int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3656                                  const camera3_callback_ops_t *callback_ops)
3657{
3658    ALOGV("%s: E", __func__);
3659    QCamera3HardwareInterface *hw =
3660        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3661    if (!hw) {
3662        ALOGE("%s: NULL camera device", __func__);
3663        return -ENODEV;
3664    }
3665
3666    int rc = hw->initialize(callback_ops);
3667    ALOGV("%s: X", __func__);
3668    return rc;
3669}
3670
3671/*===========================================================================
3672 * FUNCTION   : configure_streams
3673 *
3674 * DESCRIPTION:
3675 *
3676 * PARAMETERS :
3677 *
3678 *
3679 * RETURN     : Success: 0
3680 *              Failure: -EINVAL (if stream configuration is invalid)
3681 *                       -ENODEV (fatal error)
3682 *==========================================================================*/
3683
3684int QCamera3HardwareInterface::configure_streams(
3685        const struct camera3_device *device,
3686        camera3_stream_configuration_t *stream_list)
3687{
3688    ALOGV("%s: E", __func__);
3689    QCamera3HardwareInterface *hw =
3690        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3691    if (!hw) {
3692        ALOGE("%s: NULL camera device", __func__);
3693        return -ENODEV;
3694    }
3695    int rc = hw->configureStreams(stream_list);
3696    ALOGV("%s: X", __func__);
3697    return rc;
3698}
3699
3700/*===========================================================================
3701 * FUNCTION   : register_stream_buffers
3702 *
3703 * DESCRIPTION: Register stream buffers with the device
3704 *
3705 * PARAMETERS :
3706 *
3707 * RETURN     :
3708 *==========================================================================*/
3709int QCamera3HardwareInterface::register_stream_buffers(
3710        const struct camera3_device *device,
3711        const camera3_stream_buffer_set_t *buffer_set)
3712{
3713    ALOGV("%s: E", __func__);
3714    QCamera3HardwareInterface *hw =
3715        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3716    if (!hw) {
3717        ALOGE("%s: NULL camera device", __func__);
3718        return -ENODEV;
3719    }
3720    int rc = hw->registerStreamBuffers(buffer_set);
3721    ALOGV("%s: X", __func__);
3722    return rc;
3723}
3724
3725/*===========================================================================
3726 * FUNCTION   : construct_default_request_settings
3727 *
3728 * DESCRIPTION: Configure a settings buffer to meet the required use case
3729 *
3730 * PARAMETERS :
3731 *
3732 *
3733 * RETURN     : Success: Return valid metadata
3734 *              Failure: Return NULL
3735 *==========================================================================*/
3736const camera_metadata_t* QCamera3HardwareInterface::
3737    construct_default_request_settings(const struct camera3_device *device,
3738                                        int type)
3739{
3740
3741    ALOGV("%s: E", __func__);
3742    camera_metadata_t* fwk_metadata = NULL;
3743    QCamera3HardwareInterface *hw =
3744        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3745    if (!hw) {
3746        ALOGE("%s: NULL camera device", __func__);
3747        return NULL;
3748    }
3749
3750    fwk_metadata = hw->translateCapabilityToMetadata(type);
3751
3752    ALOGV("%s: X", __func__);
3753    return fwk_metadata;
3754}
3755
3756/*===========================================================================
3757 * FUNCTION   : process_capture_request
3758 *
3759 * DESCRIPTION:
3760 *
3761 * PARAMETERS :
3762 *
3763 *
3764 * RETURN     :
3765 *==========================================================================*/
3766int QCamera3HardwareInterface::process_capture_request(
3767                    const struct camera3_device *device,
3768                    camera3_capture_request_t *request)
3769{
3770    ALOGV("%s: E", __func__);
3771    QCamera3HardwareInterface *hw =
3772        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3773    if (!hw) {
3774        ALOGE("%s: NULL camera device", __func__);
3775        return -EINVAL;
3776    }
3777
3778    int rc = hw->processCaptureRequest(request);
3779    ALOGV("%s: X", __func__);
3780    return rc;
3781}
3782
3783/*===========================================================================
3784 * FUNCTION   : get_metadata_vendor_tag_ops
3785 *
3786 * DESCRIPTION:
3787 *
3788 * PARAMETERS :
3789 *
3790 *
3791 * RETURN     :
3792 *==========================================================================*/
3793
3794void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3795                const struct camera3_device *device,
3796                vendor_tag_query_ops_t* ops)
3797{
3798    ALOGV("%s: E", __func__);
3799    QCamera3HardwareInterface *hw =
3800        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3801    if (!hw) {
3802        ALOGE("%s: NULL camera device", __func__);
3803        return;
3804    }
3805
3806    hw->getMetadataVendorTagOps(ops);
3807    ALOGV("%s: X", __func__);
3808    return;
3809}
3810
3811/*===========================================================================
3812 * FUNCTION   : dump
3813 *
3814 * DESCRIPTION:
3815 *
3816 * PARAMETERS :
3817 *
3818 *
3819 * RETURN     :
3820 *==========================================================================*/
3821
3822void QCamera3HardwareInterface::dump(
3823                const struct camera3_device *device, int fd)
3824{
3825    ALOGV("%s: E", __func__);
3826    QCamera3HardwareInterface *hw =
3827        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3828    if (!hw) {
3829        ALOGE("%s: NULL camera device", __func__);
3830        return;
3831    }
3832
3833    hw->dump(fd);
3834    ALOGV("%s: X", __func__);
3835    return;
3836}
3837
3838/*===========================================================================
3839 * FUNCTION   : flush
3840 *
3841 * DESCRIPTION:
3842 *
3843 * PARAMETERS :
3844 *
3845 *
3846 * RETURN     :
3847 *==========================================================================*/
3848
3849int QCamera3HardwareInterface::flush(
3850                const struct camera3_device *device)
3851{
3852    int rc;
3853    ALOGV("%s: E", __func__);
3854    QCamera3HardwareInterface *hw =
3855        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3856    if (!hw) {
3857        ALOGE("%s: NULL camera device", __func__);
3858        return -EINVAL;
3859    }
3860
3861    rc = hw->flush();
3862    ALOGV("%s: X", __func__);
3863    return rc;
3864}
3865
3866/*===========================================================================
3867 * FUNCTION   : close_camera_device
3868 *
3869 * DESCRIPTION:
3870 *
3871 * PARAMETERS :
3872 *
3873 *
3874 * RETURN     :
3875 *==========================================================================*/
3876int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3877{
3878    ALOGV("%s: E", __func__);
3879    int ret = NO_ERROR;
3880    QCamera3HardwareInterface *hw =
3881        reinterpret_cast<QCamera3HardwareInterface *>(
3882            reinterpret_cast<camera3_device_t *>(device)->priv);
3883    if (!hw) {
3884        ALOGE("NULL camera device");
3885        return BAD_VALUE;
3886    }
3887    delete hw;
3888
3889    pthread_mutex_lock(&mCameraSessionLock);
3890    mCameraSessionActive = 0;
3891    pthread_mutex_unlock(&mCameraSessionLock);
3892    ALOGV("%s: X", __func__);
3893    return ret;
3894}
3895
3896/*===========================================================================
3897 * FUNCTION   : getWaveletDenoiseProcessPlate
3898 *
3899 * DESCRIPTION: query wavelet denoise process plate
3900 *
3901 * PARAMETERS : None
3902 *
 * RETURN     : WNR process plate value
3904 *==========================================================================*/
3905cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3906{
3907    char prop[PROPERTY_VALUE_MAX];
3908    memset(prop, 0, sizeof(prop));
3909    property_get("persist.denoise.process.plates", prop, "0");
3910    int processPlate = atoi(prop);
3911    switch(processPlate) {
3912    case 0:
3913        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3914    case 1:
3915        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3916    case 2:
3917        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3918    case 3:
3919        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3920    default:
3921        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3922    }
3923}
3924
3925/*===========================================================================
3926 * FUNCTION   : needRotationReprocess
3927 *
3928 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3929 *
3930 * PARAMETERS : none
3931 *
3932 * RETURN     : true: needed
3933 *              false: no need
3934 *==========================================================================*/
3935bool QCamera3HardwareInterface::needRotationReprocess()
3936{
3937
3938    if (!mJpegSettings->is_jpeg_format) {
3939        // RAW image, no need to reprocess
3940        return false;
3941    }
3942
3943    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3944        mJpegSettings->jpeg_orientation > 0) {
3945        // current rotation is not zero, and pp has the capability to process rotation
3946        ALOGD("%s: need do reprocess for rotation", __func__);
3947        return true;
3948    }
3949
3950    return false;
3951}
3952
3953/*===========================================================================
3954 * FUNCTION   : needReprocess
3955 *
 * DESCRIPTION: if reprocess is needed
3957 *
3958 * PARAMETERS : none
3959 *
3960 * RETURN     : true: needed
3961 *              false: no need
3962 *==========================================================================*/
3963bool QCamera3HardwareInterface::needReprocess()
3964{
3965    if (!mJpegSettings->is_jpeg_format) {
3966        // RAW image, no need to reprocess
3967        return false;
3968    }
3969
3970    if ((mJpegSettings->min_required_pp_mask > 0) ||
3971         isWNREnabled()) {
3972        // TODO: add for ZSL HDR later
3973        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3974        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3975        return true;
3976    }
3977    return needRotationReprocess();
3978}
3979
3980/*===========================================================================
3981 * FUNCTION   : addOnlineReprocChannel
3982 *
 * DESCRIPTION: add an online reprocess channel that will do reprocess on frames
3984 *              coming from input channel
3985 *
3986 * PARAMETERS :
3987 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3988 *
3989 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3990 *==========================================================================*/
3991QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3992              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3993{
3994    int32_t rc = NO_ERROR;
3995    QCamera3ReprocessChannel *pChannel = NULL;
3996    if (pInputChannel == NULL) {
3997        ALOGE("%s: input channel obj is NULL", __func__);
3998        return NULL;
3999    }
4000
4001    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4002            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4003    if (NULL == pChannel) {
4004        ALOGE("%s: no mem for reprocess channel", __func__);
4005        return NULL;
4006    }
4007
4008    // Capture channel, only need snapshot and postview streams start together
4009    mm_camera_channel_attr_t attr;
4010    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4011    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4012    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4013    rc = pChannel->initialize();
4014    if (rc != NO_ERROR) {
4015        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4016        delete pChannel;
4017        return NULL;
4018    }
4019
4020    // pp feature config
4021    cam_pp_feature_config_t pp_config;
4022    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4023    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4024        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4025        pp_config.sharpness = mJpegSettings->sharpness;
4026    }
4027
4028    if (isWNREnabled()) {
4029        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4030        pp_config.denoise2d.denoise_enable = 1;
4031        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4032    }
4033    if (needRotationReprocess()) {
4034        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4035        int rotation = mJpegSettings->jpeg_orientation;
4036        if (rotation == 0) {
4037            pp_config.rotation = ROTATE_0;
4038        } else if (rotation == 90) {
4039            pp_config.rotation = ROTATE_90;
4040        } else if (rotation == 180) {
4041            pp_config.rotation = ROTATE_180;
4042        } else if (rotation == 270) {
4043            pp_config.rotation = ROTATE_270;
4044        }
4045    }
4046
4047   rc = pChannel->addReprocStreamsFromSource(pp_config,
4048                                             pInputChannel,
4049                                             mMetadataChannel);
4050
4051    if (rc != NO_ERROR) {
4052        delete pChannel;
4053        return NULL;
4054    }
4055    return pChannel;
4056}
4057
4058int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
4059{
4060    return gCamCapability[mCameraId]->min_num_pp_bufs;
4061}
4062
4063bool QCamera3HardwareInterface::isWNREnabled() {
4064    return gCamCapability[mCameraId]->isWnrSupported;
4065}
4066
4067}; //end namespace qcamera
4068