QCamera3HWI.cpp revision 46b8b4e323c8d827e99eebf6c30cd349012a4475
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
// Simple max helper; parenthesized to be safe in expression contexts.
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Convenience accessor for the i-th buffer pointer of a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables queried from the backend; indexed by camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Last parameter batch that was applied (file-scope scratch state).
parm_buffer_t *prevSettings;
// Cached static metadata blobs handed to the framework, one per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards the single-session policy below: only one camera session at a time.
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
// Non-zero while any camera session is open (checked under mCameraSessionLock).
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Translation tables between framework (ANDROID_*) enum values and the
// backend (CAM_*) equivalents. Lookups scan these arrays linearly.

// ANDROID_CONTROL_EFFECT_MODE_* -> cam_effect_mode_type.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// ANDROID_CONTROL_AWB_MODE_* -> cam_wb_mode_type.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// ANDROID_CONTROL_SCENE_MODE_* -> cam_scene_mode_type.
// Note STEADYPHOTO intentionally maps to the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// ANDROID_CONTROL_AF_MODE_* -> cam_focus_mode_type.
// AF_MODE_OFF is represented as the backend's FIXED focus mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// ANDROID_CONTROL_AE_ANTIBANDING_MODE_* -> cam_antibanding_mode_type.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// ANDROID_CONTROL_AE_MODE_* -> the flash behavior it implies for the backend.
// Both plain ON and OFF disable flash; the AUTO_FLASH variants request AUTO.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// ANDROID_FLASH_MODE_* -> cam_flash_mode_t.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// ANDROID_STATISTICS_FACE_DETECT_MODE_* -> cam_face_detect_mode_t.
// SIMPLE mode is not listed here, so it has no backend translation.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the
// trailing 0,0 entry advertises "no thumbnail" as a valid choice.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
// camera3_device_ops vtable handed to the framework via mCameraDevice.ops.
// Uses the GCC "label:" designated-initializer extension; every entry points
// at the corresponding static trampoline on QCamera3HardwareInterface, which
// recovers the instance from camera3_device_t::priv.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the camera3_device_t the framework will talk to; priv points
    // back at this instance so the static ops trampolines can recover it.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // Tell the backend capability table this sensor runs the HAL3 path.
    // NOTE: assumes gCamCapability[cameraId] was populated before
    // construction (e.g. by the static capability-init path).
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request bookkeeping: condition/mutex pair used by the capture path.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start them all NULL.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Best-effort: power module is optional, failure is only logged.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if ((*it)->registered && (*it)->buffer_set.buffers) {
240             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
241        }
242        if (channel)
243            delete channel;
244        free (*it);
245    }
246
247    mPictureChannel = NULL;
248
249    if (mJpegSettings != NULL) {
250        free(mJpegSettings);
251        mJpegSettings = NULL;
252    }
253
254    /* Clean up all channels */
255    if (mCameraInitialized) {
256        if (mMetadataChannel) {
257            mMetadataChannel->stop();
258            delete mMetadataChannel;
259            mMetadataChannel = NULL;
260        }
261        deinitParameters();
262    }
263
264    if (mCameraOpened)
265        closeCamera();
266
267    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
268        if (mDefaultMetadata[i])
269            free_camera_metadata(mDefaultMetadata[i]);
270
271    pthread_cond_destroy(&mRequestCond);
272
273    pthread_mutex_destroy(&mMutex);
274    ALOGV("%s: X", __func__);
275}
276
277/*===========================================================================
278 * FUNCTION   : openCamera
279 *
280 * DESCRIPTION: open camera
281 *
282 * PARAMETERS :
283 *   @hw_device  : double ptr for camera device struct
284 *
285 * RETURN     : int32_t type of status
286 *              NO_ERROR  -- success
287 *              none-zero failure code
288 *==========================================================================*/
289int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
290{
291    int rc = 0;
292    pthread_mutex_lock(&mCameraSessionLock);
293    if (mCameraSessionActive) {
294        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
295        pthread_mutex_unlock(&mCameraSessionLock);
296        return -EDQUOT;
297    }
298
299    if (mCameraOpened) {
300        *hw_device = NULL;
301        return PERMISSION_DENIED;
302    }
303
304    rc = openCamera();
305    if (rc == 0) {
306        *hw_device = &mCameraDevice.common;
307        mCameraSessionActive = 1;
308    } else
309        *hw_device = NULL;
310
311#ifdef HAS_MULTIMEDIA_HINTS
312    if (rc == 0) {
313        if (m_pPowerModule) {
314            if (m_pPowerModule->powerHint) {
315                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
316                        (void *)"state=1");
317            }
318        }
319    }
320#endif
321    pthread_mutex_unlock(&mCameraSessionLock);
322    return rc;
323}
324
325/*===========================================================================
326 * FUNCTION   : openCamera
327 *
328 * DESCRIPTION: open camera
329 *
330 * PARAMETERS : none
331 *
332 * RETURN     : int32_t type of status
333 *              NO_ERROR  -- success
334 *              none-zero failure code
335 *==========================================================================*/
336int QCamera3HardwareInterface::openCamera()
337{
338    if (mCameraHandle) {
339        ALOGE("Failure: Camera already opened");
340        return ALREADY_EXISTS;
341    }
342    mCameraHandle = camera_open(mCameraId);
343    if (!mCameraHandle) {
344        ALOGE("camera_open failed.");
345        return UNKNOWN_ERROR;
346    }
347
348    mCameraOpened = true;
349
350    return NO_ERROR;
351}
352
353/*===========================================================================
354 * FUNCTION   : closeCamera
355 *
356 * DESCRIPTION: close camera
357 *
358 * PARAMETERS : none
359 *
360 * RETURN     : int32_t type of status
361 *              NO_ERROR  -- success
362 *              none-zero failure code
363 *==========================================================================*/
364int QCamera3HardwareInterface::closeCamera()
365{
366    int rc = NO_ERROR;
367
368    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
369    mCameraHandle = NULL;
370    mCameraOpened = false;
371
372#ifdef HAS_MULTIMEDIA_HINTS
373    if (rc == NO_ERROR) {
374        if (m_pPowerModule) {
375            if (m_pPowerModule->powerHint) {
376                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
377                        (void *)"state=0");
378            }
379        }
380    }
381#endif
382
383    return rc;
384}
385
386/*===========================================================================
387 * FUNCTION   : initialize
388 *
389 * DESCRIPTION: Initialize frameworks callback functions
390 *
391 * PARAMETERS :
392 *   @callback_ops : callback function to frameworks
393 *
394 * RETURN     :
395 *
396 *==========================================================================*/
397int QCamera3HardwareInterface::initialize(
398        const struct camera3_callback_ops *callback_ops)
399{
400    int rc;
401
402    pthread_mutex_lock(&mMutex);
403
404    rc = initParameters();
405    if (rc < 0) {
406        ALOGE("%s: initParamters failed %d", __func__, rc);
407       goto err1;
408    }
409    mCallbackOps = callback_ops;
410
411    pthread_mutex_unlock(&mMutex);
412    mCameraInitialized = true;
413    return 0;
414
415err1:
416    pthread_mutex_unlock(&mMutex);
417    return rc;
418}
419
420/*===========================================================================
421 * FUNCTION   : configureStreams
422 *
423 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
424 *              and output streams.
425 *
426 * PARAMETERS :
427 *   @stream_list : streams to be configured
428 *
429 * RETURN     :
430 *
431 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    mIsZslMode = false;

    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        return BAD_VALUE;
    }
    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        return BAD_VALUE;
    }

    /* first invalidate all the steams in the mStreamList
     * if they appear again, they will be validated */
    // Phase 1: stop everything that is currently running. Streams that
    // reappear in streamList get re-marked below; the rest stay INVALID
    // and are torn down in the cleanup pass.
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        mMetadataChannel->stop();
    }

    pthread_mutex_lock(&mMutex);

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    cam_stream_size_info_t stream_config_info;

    // Phase 2: classify the requested streams. Existing streams are marked
    // RECONFIGURE (their channel is deleted so it can be rebuilt); unseen
    // streams get a fresh bookkeeping record. Also remember the (single)
    // input stream and any BLOB (JPEG) stream for ZSL sizing decisions.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            // NOTE(review): malloc result is not checked; a failed
            // allocation would be dereferenced immediately below.
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    // Phase 3: anything still INVALID was dropped by the framework; free
    // its channel, its registered buffer array, and its record.
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        // NOTE(review): mMetadataChannel is deleted but not reset to NULL
        // here, leaving a dangling pointer if the caller retries — confirm
        // whether the framework tears the device down after this failure.
        delete mMetadataChannel;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* Allocate channel objects for the requested streams */
    // Phase 4: per stream, record its size/type for the backend
    // (stream_config_info) and build a channel for streams that lost or
    // never had one (priv == NULL).
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        stream_config_info.stream_sizes[i].width = newStream->width;
        stream_config_info.stream_sizes[i].height = newStream->height;
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
            //for zsl stream the size is jpeg size
            stream_config_info.stream_sizes[i].width = jpegStream->width;
            stream_config_info.stream_sizes[i].height = jpegStream->height;
            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
        } else {
           //for non zsl streams find out the format
           switch (newStream->format) {
           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
              {
                 // Encoder-consumed gralloc buffers are treated as video,
                 // everything else as preview.
                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
                 } else {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
                 }
              }
              break;
           case HAL_PIXEL_FORMAT_YCbCr_420_888:
              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
              break;
           case HAL_PIXEL_FORMAT_BLOB:
              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
              break;
           default:
              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
              break;
           }
        }
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // Set gralloc usage bits according to the stream direction
            // before handing the stream to a channel.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage =
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else
                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    // A bidirectional stream paired with a JPEG stream is the
                    // ZSL case: the channel is sized to the JPEG dimensions.
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework wont*/
    // Phase 5: re-register previously registered buffers on rebuilt
    // channels, and reset the pending-buffer counters for every stream.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();

    /*flush the metadata list*/
    // NOTE(review): erase() already returns the next iterator, so the m++
    // in this loop skips every other entry (leaking its meta_buf) — same
    // pattern as the destructor's flush loop; confirm and fix together.
    if (!mStoredMetadataList.empty()) {
        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
              m != mStoredMetadataList.end(); m++) {
            mMetadataChannel->bufDone(m->meta_buf);
            free(m->meta_buf);
            m = mStoredMetadataList.erase(m);
        }
    }
    int32_t hal_version = CAM_HAL_V3;
    stream_config_info.num_streams = streamList->num_streams;

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));

    // Phase 6: push HAL version + per-stream size/type table to the backend.
    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);

    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
                sizeof(stream_config_info), &stream_config_info);

    // NOTE(review): set_parms return status is ignored here — presumably
    // intentional best-effort; verify against the backend contract.
    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);

    mFirstRequest = true;

    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}
729
730/*===========================================================================
731 * FUNCTION   : validateCaptureRequest
732 *
733 * DESCRIPTION: validate a capture request from camera service
734 *
735 * PARAMETERS :
736 *   @request : request from framework to process
737 *
738 * RETURN     :
739 *
740 *==========================================================================*/
741int QCamera3HardwareInterface::validateCaptureRequest(
742                    camera3_capture_request_t *request)
743{
744    ssize_t idx = 0;
745    const camera3_stream_buffer_t *b;
746    CameraMetadata meta;
747
748    /* Sanity check the request */
749    if (request == NULL) {
750        ALOGE("%s: NULL capture request", __func__);
751        return BAD_VALUE;
752    }
753
754    uint32_t frameNumber = request->frame_number;
755    if (request->input_buffer != NULL &&
756            request->input_buffer->stream != mInputStream) {
757        ALOGE("%s: Request %d: Input buffer not from input stream!",
758                __FUNCTION__, frameNumber);
759        return BAD_VALUE;
760    }
761    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
762        ALOGE("%s: Request %d: No output buffers provided!",
763                __FUNCTION__, frameNumber);
764        return BAD_VALUE;
765    }
766    if (request->input_buffer != NULL) {
767        b = request->input_buffer;
768        QCamera3Channel *channel =
769            static_cast<QCamera3Channel*>(b->stream->priv);
770        if (channel == NULL) {
771            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
772                    __func__, frameNumber, idx);
773            return BAD_VALUE;
774        }
775        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
776            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
777                    __func__, frameNumber, idx);
778            return BAD_VALUE;
779        }
780        if (b->release_fence != -1) {
781            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
782                    __func__, frameNumber, idx);
783            return BAD_VALUE;
784        }
785        if (b->buffer == NULL) {
786            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
787                    __func__, frameNumber, idx);
788            return BAD_VALUE;
789        }
790    }
791
792    // Validate all buffers
793    b = request->output_buffers;
794    do {
795        QCamera3Channel *channel =
796                static_cast<QCamera3Channel*>(b->stream->priv);
797        if (channel == NULL) {
798            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
799                    __func__, frameNumber, idx);
800            return BAD_VALUE;
801        }
802        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
803            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
804                    __func__, frameNumber, idx);
805            return BAD_VALUE;
806        }
807        if (b->release_fence != -1) {
808            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
809                    __func__, frameNumber, idx);
810            return BAD_VALUE;
811        }
812        if (b->buffer == NULL) {
813            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
814                    __func__, frameNumber, idx);
815            return BAD_VALUE;
816        }
817        idx++;
818        b = request->output_buffers + idx;
819    } while (idx < (ssize_t)request->num_output_buffers);
820
821    return NO_ERROR;
822}
823
824/*===========================================================================
825 * FUNCTION   : deriveMinFrameDuration
826 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
828 *              on currently configured streams.
829 *
830 * PARAMETERS : NONE
831 *
832 * RETURN     : NONE
833 *
834 *==========================================================================*/
835void QCamera3HardwareInterface::deriveMinFrameDuration()
836{
837    int32_t maxJpegDimension, maxProcessedDimension;
838
839    maxJpegDimension = 0;
840    maxProcessedDimension = 0;
841
842    // Figure out maximum jpeg, processed, and raw dimensions
843    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
844        it != mStreamInfo.end(); it++) {
845
846        // Input stream doesn't have valid stream_type
847        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
848            continue;
849
850        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
851        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
852            if (dimension > maxJpegDimension)
853                maxJpegDimension = dimension;
854        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
855            if (dimension > maxProcessedDimension)
856                maxProcessedDimension = dimension;
857        }
858    }
859
860    //Assume all jpeg dimensions are in processed dimensions.
861    if (maxJpegDimension > maxProcessedDimension)
862        maxProcessedDimension = maxJpegDimension;
863
864    //Find minimum durations for processed, jpeg, and raw
865    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
866    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
867        if (maxProcessedDimension ==
868            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
869            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
870            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
871            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
872            break;
873        }
874    }
875}
876
877/*===========================================================================
878 * FUNCTION   : getMinFrameDuration
879 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
881 *              and current request configuration.
882 *
 * PARAMETERS : @request: request sent by the frameworks
884 *
 * RETURN     : min frame duration for a particular request
886 *
887 *==========================================================================*/
888int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
889{
890    bool hasJpegStream = false;
891    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
892        const camera3_stream_t *stream = request->output_buffers[i].stream;
893        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
894            hasJpegStream = true;
895    }
896
897    if (!hasJpegStream)
898        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
899    else
900        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
901}
902
903/*===========================================================================
904 * FUNCTION   : registerStreamBuffers
905 *
906 * DESCRIPTION: Register buffers for a given stream with the HAL device.
907 *
908 * PARAMETERS :
909 *   @stream_list : streams to be configured
910 *
911 * RETURN     :
912 *
913 *==========================================================================*/
914int QCamera3HardwareInterface::registerStreamBuffers(
915        const camera3_stream_buffer_set_t *buffer_set)
916{
917    int rc = 0;
918
919    pthread_mutex_lock(&mMutex);
920
921    if (buffer_set == NULL) {
922        ALOGE("%s: Invalid buffer_set parameter.", __func__);
923        pthread_mutex_unlock(&mMutex);
924        return -EINVAL;
925    }
926    if (buffer_set->stream == NULL) {
927        ALOGE("%s: Invalid stream parameter.", __func__);
928        pthread_mutex_unlock(&mMutex);
929        return -EINVAL;
930    }
931    if (buffer_set->num_buffers < 1) {
932        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
933        pthread_mutex_unlock(&mMutex);
934        return -EINVAL;
935    }
936    if (buffer_set->buffers == NULL) {
937        ALOGE("%s: Invalid buffers parameter.", __func__);
938        pthread_mutex_unlock(&mMutex);
939        return -EINVAL;
940    }
941
942    camera3_stream_t *stream = buffer_set->stream;
943    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
944
945    //set the buffer_set in the mStreamInfo array
946    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
947            it != mStreamInfo.end(); it++) {
948        if ((*it)->stream == stream) {
949            uint32_t numBuffers = buffer_set->num_buffers;
950            (*it)->buffer_set.stream = buffer_set->stream;
951            (*it)->buffer_set.num_buffers = numBuffers;
952            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
953            if ((*it)->buffer_set.buffers == NULL) {
954                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
955                pthread_mutex_unlock(&mMutex);
956                return -ENOMEM;
957            }
958            for (size_t j = 0; j < numBuffers; j++){
959                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
960            }
961            (*it)->registered = 1;
962        }
963    }
964    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
965    if (rc < 0) {
966        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
967        pthread_mutex_unlock(&mMutex);
968        return -ENODEV;
969    }
970
971    pthread_mutex_unlock(&mMutex);
972    return NO_ERROR;
973}
974
975/*===========================================================================
976 * FUNCTION   : processCaptureRequest
977 *
978 * DESCRIPTION: process a capture request from camera service
979 *
980 * PARAMETERS :
981 *   @request : request from framework to process
982 *
983 * RETURN     :
984 *
985 *==========================================================================*/
986int QCamera3HardwareInterface::processCaptureRequest(
987                    camera3_capture_request_t *request)
988{
989    int rc = NO_ERROR;
990    int32_t request_id;
991    CameraMetadata meta;
992    MetadataBufferInfo reproc_meta;
993    int queueMetadata = 0;
994
995    pthread_mutex_lock(&mMutex);
996
997    rc = validateCaptureRequest(request);
998    if (rc != NO_ERROR) {
999        ALOGE("%s: incoming request is not valid", __func__);
1000        pthread_mutex_unlock(&mMutex);
1001        return rc;
1002    }
1003
1004    meta = request->settings;
1005
1006    // For first capture request, send capture intent, and
1007    // stream on all streams
1008    if (mFirstRequest) {
1009
1010        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1011            int32_t hal_version = CAM_HAL_V3;
1012            uint8_t captureIntent =
1013                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1014
1015            memset(mParameters, 0, sizeof(parm_buffer_t));
1016            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1017            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1018                sizeof(hal_version), &hal_version);
1019            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1020                sizeof(captureIntent), &captureIntent);
1021            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1022                mParameters);
1023        }
1024
1025        mMetadataChannel->start();
1026        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1027            it != mStreamInfo.end(); it++) {
1028            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1029            channel->start();
1030        }
1031    }
1032
1033    uint32_t frameNumber = request->frame_number;
1034    uint32_t streamTypeMask = 0;
1035
1036    if (meta.exists(ANDROID_REQUEST_ID)) {
1037        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1038        mCurrentRequestId = request_id;
1039        ALOGV("%s: Received request with id: %d",__func__, request_id);
1040    } else if (mFirstRequest || mCurrentRequestId == -1){
1041        ALOGE("%s: Unable to find request id field, \
1042                & no previous id available", __func__);
1043        return NAME_NOT_FOUND;
1044    } else {
1045        ALOGV("%s: Re-using old request id", __func__);
1046        request_id = mCurrentRequestId;
1047    }
1048
1049    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1050                                    __func__, __LINE__,
1051                                    request->num_output_buffers,
1052                                    request->input_buffer,
1053                                    frameNumber);
1054    // Acquire all request buffers first
1055    int blob_request = 0;
1056    for (size_t i = 0; i < request->num_output_buffers; i++) {
1057        const camera3_stream_buffer_t& output = request->output_buffers[i];
1058        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1059        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1060
1061        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1062        //Call function to store local copy of jpeg data for encode params.
1063            blob_request = 1;
1064            rc = getJpegSettings(request->settings);
1065            if (rc < 0) {
1066                ALOGE("%s: failed to get jpeg parameters", __func__);
1067                pthread_mutex_unlock(&mMutex);
1068                return rc;
1069            }
1070        }
1071
1072        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1073        if (rc != OK) {
1074            ALOGE("%s: fence wait failed %d", __func__, rc);
1075            pthread_mutex_unlock(&mMutex);
1076            return rc;
1077        }
1078        streamTypeMask |= channel->getStreamTypeMask();
1079    }
1080
1081    rc = setFrameParameters(request, streamTypeMask);
1082    if (rc < 0) {
1083        ALOGE("%s: fail to set frame parameters", __func__);
1084        pthread_mutex_unlock(&mMutex);
1085        return rc;
1086    }
1087
1088    /* Update pending request list and pending buffers map */
1089    PendingRequestInfo pendingRequest;
1090    pendingRequest.frame_number = frameNumber;
1091    pendingRequest.num_buffers = request->num_output_buffers;
1092    pendingRequest.request_id = request_id;
1093    pendingRequest.blob_request = blob_request;
1094    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1095
1096    for (size_t i = 0; i < request->num_output_buffers; i++) {
1097        RequestedBufferInfo requestedBuf;
1098        requestedBuf.stream = request->output_buffers[i].stream;
1099        requestedBuf.buffer = NULL;
1100        pendingRequest.buffers.push_back(requestedBuf);
1101
1102        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1103    }
1104    mPendingRequestsList.push_back(pendingRequest);
1105
1106    // Notify metadata channel we receive a request
1107    mMetadataChannel->request(NULL, frameNumber);
1108
1109    // Call request on other streams
1110    for (size_t i = 0; i < request->num_output_buffers; i++) {
1111        const camera3_stream_buffer_t& output = request->output_buffers[i];
1112        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1113        mm_camera_buf_def_t *pInputBuffer = NULL;
1114
1115        if (channel == NULL) {
1116            ALOGE("%s: invalid channel pointer for stream", __func__);
1117            continue;
1118        }
1119
1120        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1121            QCamera3RegularChannel* inputChannel = NULL;
1122            if(request->input_buffer != NULL){
1123                //Try to get the internal format
1124                inputChannel = (QCamera3RegularChannel*)
1125                    request->input_buffer->stream->priv;
1126                if(inputChannel == NULL ){
1127                    ALOGE("%s: failed to get input channel handle", __func__);
1128                } else {
1129                    pInputBuffer =
1130                        inputChannel->getInternalFormatBuffer(
1131                                request->input_buffer->buffer);
1132                    ALOGD("%s: Input buffer dump",__func__);
1133                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1134                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1135                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1136                    ALOGD("Handle:%p", request->input_buffer->buffer);
1137                    //TODO: need to get corresponding metadata and send it to pproc
1138                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1139                         m != mStoredMetadataList.end(); m++) {
1140                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1141                            reproc_meta.meta_buf = m->meta_buf;
1142                            queueMetadata = 1;
1143                            break;
1144                        }
1145                    }
1146                }
1147            }
1148            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1149                            pInputBuffer,(QCamera3Channel*)inputChannel);
1150            if (queueMetadata) {
1151                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1152            }
1153        } else {
1154            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1155                __LINE__, output.buffer, frameNumber);
1156            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1157                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1158                     m != mStoredMetadataList.end(); m++) {
1159                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1160                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1161                            mMetadataChannel->bufDone(m->meta_buf);
1162                            free(m->meta_buf);
1163                            m = mStoredMetadataList.erase(m);
1164                            break;
1165                        }
1166                   }
1167                }
1168            }
1169            rc = channel->request(output.buffer, frameNumber);
1170        }
1171        if (rc < 0)
1172            ALOGE("%s: request failed", __func__);
1173    }
1174
1175    mFirstRequest = false;
1176    // Added a timed condition wait
1177    struct timespec ts;
1178    uint8_t isValidTimeout = 1;
1179    rc = clock_gettime(CLOCK_REALTIME, &ts);
1180    if (rc < 0) {
1181        isValidTimeout = 0;
1182        ALOGE("%s: Error reading the real time clock!!", __func__);
1183    }
1184    else {
1185        // Make timeout as 5 sec for request to be honored
1186        ts.tv_sec += 5;
1187    }
1188    //Block on conditional variable
1189    mPendingRequest = 1;
1190    while (mPendingRequest == 1) {
1191        if (!isValidTimeout) {
1192            ALOGV("%s: Blocking on conditional wait", __func__);
1193            pthread_cond_wait(&mRequestCond, &mMutex);
1194        }
1195        else {
1196            ALOGV("%s: Blocking on timed conditional wait", __func__);
1197            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1198            if (rc == ETIMEDOUT) {
1199                rc = -ENODEV;
1200                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1201                break;
1202            }
1203        }
1204        ALOGV("%s: Unblocked", __func__);
1205    }
1206
1207    pthread_mutex_unlock(&mMutex);
1208    return rc;
1209}
1210
1211/*===========================================================================
1212 * FUNCTION   : getMetadataVendorTagOps
1213 *
1214 * DESCRIPTION:
1215 *
1216 * PARAMETERS :
1217 *
1218 *
1219 * RETURN     :
1220 *==========================================================================*/
1221void QCamera3HardwareInterface::getMetadataVendorTagOps(
1222                    vendor_tag_query_ops_t* /*ops*/)
1223{
1224    /* Enable locks when we eventually add Vendor Tags */
1225    /*
1226    pthread_mutex_lock(&mMutex);
1227
1228    pthread_mutex_unlock(&mMutex);
1229    */
1230    return;
1231}
1232
1233/*===========================================================================
1234 * FUNCTION   : dump
1235 *
1236 * DESCRIPTION:
1237 *
1238 * PARAMETERS :
1239 *
1240 *
1241 * RETURN     :
1242 *==========================================================================*/
1243void QCamera3HardwareInterface::dump(int /*fd*/)
1244{
1245    /*Enable lock when we implement this function*/
1246    /*
1247    pthread_mutex_lock(&mMutex);
1248
1249    pthread_mutex_unlock(&mMutex);
1250    */
1251    return;
1252}
1253
1254/*===========================================================================
1255 * FUNCTION   : flush
1256 *
1257 * DESCRIPTION:
1258 *
1259 * PARAMETERS :
1260 *
1261 *
1262 * RETURN     :
1263 *==========================================================================*/
1264int QCamera3HardwareInterface::flush()
1265{
1266    /*Enable lock when we implement this function*/
1267    /*
1268    pthread_mutex_lock(&mMutex);
1269
1270    pthread_mutex_unlock(&mMutex);
1271    */
1272    return 0;
1273}
1274
1275/*===========================================================================
1276 * FUNCTION   : captureResultCb
1277 *
1278 * DESCRIPTION: Callback handler for all capture result
1279 *              (streams, as well as metadata)
1280 *
1281 * PARAMETERS :
1282 *   @metadata : metadata information
1283 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1284 *               NULL if metadata.
1285 *
1286 * RETURN     : NONE
1287 *==========================================================================*/
1288void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1289                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1290{
1291    pthread_mutex_lock(&mMutex);
1292
1293    if (metadata_buf) {
1294        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1295        int32_t frame_number_valid = *(int32_t *)
1296            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1297        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1298            CAM_INTF_META_PENDING_REQUESTS, metadata);
1299        uint32_t frame_number = *(uint32_t *)
1300            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1301        const struct timeval *tv = (const struct timeval *)
1302            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1303        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1304            tv->tv_usec * NSEC_PER_USEC;
1305
1306        if (!frame_number_valid) {
1307            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1308            mMetadataChannel->bufDone(metadata_buf);
1309            free(metadata_buf);
1310            goto done_metadata;
1311        }
1312        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1313                frame_number, capture_time);
1314
1315        // Go through the pending requests info and send shutter/results to frameworks
1316        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1317                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1318            camera3_capture_result_t result;
1319            camera3_notify_msg_t notify_msg;
1320            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1321
1322            // Flush out all entries with less or equal frame numbers.
1323
1324            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1325            //Right now it's the same as metadata timestamp
1326
1327            //TODO: When there is metadata drop, how do we derive the timestamp of
1328            //dropped frames? For now, we fake the dropped timestamp by substracting
1329            //from the reported timestamp
1330            nsecs_t current_capture_time = capture_time -
1331                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1332
1333            // Send shutter notify to frameworks
1334            notify_msg.type = CAMERA3_MSG_SHUTTER;
1335            notify_msg.message.shutter.frame_number = i->frame_number;
1336            notify_msg.message.shutter.timestamp = current_capture_time;
1337            mCallbackOps->notify(mCallbackOps, &notify_msg);
1338            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1339                    i->frame_number, capture_time);
1340
1341            // Send empty metadata with already filled buffers for dropped metadata
1342            // and send valid metadata with already filled buffers for current metadata
1343            if (i->frame_number < frame_number) {
1344                CameraMetadata dummyMetadata;
1345                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1346                        &current_capture_time, 1);
1347                dummyMetadata.update(ANDROID_REQUEST_ID,
1348                        &(i->request_id), 1);
1349                result.result = dummyMetadata.release();
1350            } else {
1351                result.result = translateCbMetadataToResultMetadata(metadata,
1352                        current_capture_time, i->request_id);
1353                if (mIsZslMode) {
1354                   int found_metadata = 0;
1355                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1356                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1357                        j != i->buffers.end(); j++) {
1358                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1359                         //check if corresp. zsl already exists in the stored metadata list
1360                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1361                               m != mStoredMetadataList.begin(); m++) {
1362                            if (m->frame_number == frame_number) {
1363                               m->meta_buf = metadata_buf;
1364                               found_metadata = 1;
1365                               break;
1366                            }
1367                         }
1368                         if (!found_metadata) {
1369                            MetadataBufferInfo store_meta_info;
1370                            store_meta_info.meta_buf = metadata_buf;
1371                            store_meta_info.frame_number = frame_number;
1372                            mStoredMetadataList.push_back(store_meta_info);
1373                            found_metadata = 1;
1374                         }
1375                      }
1376                   }
1377                   if (!found_metadata) {
1378                       if (!i->input_buffer_present && i->blob_request) {
1379                          //livesnapshot or fallback non-zsl snapshot case
1380                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1381                                j != i->buffers.end(); j++){
1382                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1383                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1384                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1385                                 break;
1386                              }
1387                         }
1388                       } else {
1389                            //return the metadata immediately
1390                            mMetadataChannel->bufDone(metadata_buf);
1391                            free(metadata_buf);
1392                       }
1393                   }
1394               } else if (!mIsZslMode && i->blob_request) {
1395                   //If it is a blob request then send the metadata to the picture channel
1396                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1397               } else {
1398                   // Return metadata buffer
1399                   mMetadataChannel->bufDone(metadata_buf);
1400                   free(metadata_buf);
1401               }
1402
1403            }
1404            if (!result.result) {
1405                ALOGE("%s: metadata is NULL", __func__);
1406            }
1407            result.frame_number = i->frame_number;
1408            result.num_output_buffers = 0;
1409            result.output_buffers = NULL;
1410            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1411                    j != i->buffers.end(); j++) {
1412                if (j->buffer) {
1413                    result.num_output_buffers++;
1414                }
1415            }
1416
1417            if (result.num_output_buffers > 0) {
1418                camera3_stream_buffer_t *result_buffers =
1419                    new camera3_stream_buffer_t[result.num_output_buffers];
1420                if (!result_buffers) {
1421                    ALOGE("%s: Fatal error: out of memory", __func__);
1422                }
1423                size_t result_buffers_idx = 0;
1424                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1425                        j != i->buffers.end(); j++) {
1426                    if (j->buffer) {
1427                        result_buffers[result_buffers_idx++] = *(j->buffer);
1428                        free(j->buffer);
1429                        j->buffer = NULL;
1430                        mPendingBuffersMap.editValueFor(j->stream)--;
1431                    }
1432                }
1433                result.output_buffers = result_buffers;
1434
1435                mCallbackOps->process_capture_result(mCallbackOps, &result);
1436                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1437                        __func__, result.frame_number, current_capture_time);
1438                free_camera_metadata((camera_metadata_t *)result.result);
1439                delete[] result_buffers;
1440            } else {
1441                mCallbackOps->process_capture_result(mCallbackOps, &result);
1442                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1443                        __func__, result.frame_number, current_capture_time);
1444                free_camera_metadata((camera_metadata_t *)result.result);
1445            }
1446            // erase the element from the list
1447            i = mPendingRequestsList.erase(i);
1448        }
1449
1450
1451done_metadata:
1452        bool max_buffers_dequeued = false;
1453        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1454            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1455            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1456            if (queued_buffers == stream->max_buffers) {
1457                max_buffers_dequeued = true;
1458                break;
1459            }
1460        }
1461        if (!max_buffers_dequeued && !pending_requests) {
1462            // Unblock process_capture_request
1463            mPendingRequest = 0;
1464            pthread_cond_signal(&mRequestCond);
1465        }
1466    } else {
1467        // If the frame number doesn't exist in the pending request list,
1468        // directly send the buffer to the frameworks, and update pending buffers map
1469        // Otherwise, book-keep the buffer.
1470        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1471        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1472            i++;
1473        }
1474        if (i == mPendingRequestsList.end()) {
1475            // Verify all pending requests frame_numbers are greater
1476            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1477                    j != mPendingRequestsList.end(); j++) {
1478                if (j->frame_number < frame_number) {
1479                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1480                            __func__, j->frame_number, frame_number);
1481                }
1482            }
1483            camera3_capture_result_t result;
1484            result.result = NULL;
1485            result.frame_number = frame_number;
1486            result.num_output_buffers = 1;
1487            result.output_buffers = buffer;
1488            ALOGV("%s: result frame_number = %d, buffer = %p",
1489                    __func__, frame_number, buffer);
1490            mPendingBuffersMap.editValueFor(buffer->stream)--;
1491            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1492                int found = 0;
1493                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1494                      k != mStoredMetadataList.end(); k++) {
1495                    if (k->frame_number == frame_number) {
1496                        k->zsl_buf_hdl = buffer->buffer;
1497                        found = 1;
1498                        break;
1499                    }
1500                }
1501                if (!found) {
1502                   MetadataBufferInfo meta_info;
1503                   meta_info.frame_number = frame_number;
1504                   meta_info.zsl_buf_hdl = buffer->buffer;
1505                   mStoredMetadataList.push_back(meta_info);
1506                }
1507            }
1508            mCallbackOps->process_capture_result(mCallbackOps, &result);
1509        } else {
1510            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1511                    j != i->buffers.end(); j++) {
1512                if (j->stream == buffer->stream) {
1513                    if (j->buffer != NULL) {
1514                        ALOGE("%s: Error: buffer is already set", __func__);
1515                    } else {
1516                        j->buffer = (camera3_stream_buffer_t *)malloc(
1517                                sizeof(camera3_stream_buffer_t));
1518                        *(j->buffer) = *buffer;
1519                        ALOGV("%s: cache buffer %p at result frame_number %d",
1520                                __func__, buffer, frame_number);
1521                    }
1522                }
1523            }
1524        }
1525    }
1526    pthread_mutex_unlock(&mMutex);
1527    return;
1528}
1529
1530/*===========================================================================
1531 * FUNCTION   : translateCbMetadataToResultMetadata
1532 *
1533 * DESCRIPTION:
1534 *
1535 * PARAMETERS :
1536 *   @metadata : metadata information from callback
1537 *
1538 * RETURN     : camera_metadata_t*
1539 *              metadata in a format specified by fwk
1540 *==========================================================================*/
1541camera_metadata_t*
1542QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1543                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1544                                 int32_t request_id)
1545{
1546    CameraMetadata camMetadata;
1547    camera_metadata_t* resultMetadata;
1548
1549    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1550    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1551
1552    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1553    uint8_t next_entry;
1554    while (curr_entry != CAM_INTF_PARM_MAX) {
1555       ALOGV("%s: META_DEBUG: cur_entry is %d", __func__, curr_entry);
1556       switch (curr_entry) {
1557         case CAM_INTF_META_FACE_DETECTION:{
1558             cam_face_detection_data_t *faceDetectionInfo =
1559                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1560             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1561             int32_t faceIds[numFaces];
1562             uint8_t faceScores[numFaces];
1563             int32_t faceRectangles[numFaces * 4];
1564             int32_t faceLandmarks[numFaces * 6];
1565             int j = 0, k = 0;
1566             for (int i = 0; i < numFaces; i++) {
1567                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1568                 faceScores[i] = faceDetectionInfo->faces[i].score;
1569                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1570                         faceRectangles+j, -1);
1571                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1572                 j+= 4;
1573                 k+= 6;
1574             }
1575             if (numFaces > 0) {
1576                 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1577                 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1578                 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1579                     faceRectangles, numFaces*4);
1580                 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1581                     faceLandmarks, numFaces*6);
1582             }
1583            break;
1584            }
1585         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1586             uint8_t  *color_correct_mode =
1587                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1588             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1589             break;
1590          }
1591         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1592             int32_t  *ae_precapture_id =
1593                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1594             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1595             break;
1596          }
1597         case CAM_INTF_META_AEC_ROI: {
1598            cam_area_t  *hAeRegions =
1599                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1600             int32_t aeRegions[5];
1601             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1602             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1603             break;
1604          }
1605          case CAM_INTF_META_AEC_STATE:{
1606             uint8_t *ae_state =
1607                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1608             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1609             break;
1610          }
1611          case CAM_INTF_PARM_FOCUS_MODE:{
1612             uint8_t  *focusMode =
1613                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1614             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1615                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1616             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1617             break;
1618          }
1619          case CAM_INTF_META_AF_ROI:{
1620             /*af regions*/
1621             cam_area_t  *hAfRegions =
1622                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1623             int32_t afRegions[5];
1624             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1625             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1626             break;
1627          }
1628          case CAM_INTF_META_AF_STATE: {
1629             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1630             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1631             break;
1632          }
1633          case CAM_INTF_META_AF_TRIGGER_ID: {
1634             int32_t  *afTriggerId =
1635                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1636             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1637             break;
1638          }
1639          case CAM_INTF_PARM_WHITE_BALANCE: {
1640               uint8_t  *whiteBalance =
1641                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1642               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1643                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1644                   *whiteBalance);
1645               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1646               break;
1647          }
1648          case CAM_INTF_META_AWB_REGIONS: {
1649             /*awb regions*/
1650             cam_area_t  *hAwbRegions =
1651                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1652             int32_t awbRegions[5];
1653             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1654             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1655             break;
1656          }
1657          case CAM_INTF_META_AWB_STATE: {
1658             uint8_t  *whiteBalanceState =
1659                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1660             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1661             break;
1662          }
1663          case CAM_INTF_META_MODE: {
1664             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1665             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1666             break;
1667          }
1668          case CAM_INTF_META_EDGE_MODE: {
1669             uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1670             camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1671             break;
1672          }
1673          case CAM_INTF_META_FLASH_POWER: {
1674             uint8_t  *flashPower =
1675                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1676             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1677             break;
1678          }
1679          case CAM_INTF_META_FLASH_FIRING_TIME: {
1680             int64_t  *flashFiringTime =
1681                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1682             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1683             break;
1684          }
1685          case CAM_INTF_META_FLASH_STATE: {
1686             uint8_t  *flashState =
1687                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1688             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1689             break;
1690          }
1691          case CAM_INTF_META_HOTPIXEL_MODE: {
1692              uint8_t  *hotPixelMode =
1693                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1694              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1695              break;
1696          }
1697          case CAM_INTF_META_LENS_APERTURE:{
1698             float  *lensAperture =
1699                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1700             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1701             break;
1702          }
1703          case CAM_INTF_META_LENS_FILTERDENSITY: {
1704             float  *filterDensity =
1705                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1706             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1707             break;
1708          }
1709          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1710             float  *focalLength =
1711                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1712             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1713             break;
1714          }
1715          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1716             float  *focusDistance =
1717                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1718             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1719             break;
1720          }
1721          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1722             float  *focusRange =
1723                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1724             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
1725          }
1726          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1727             uint8_t  *opticalStab =
1728                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1729             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1730          }
1731          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1732             uint8_t  *noiseRedMode =
1733                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1734             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1735             break;
1736          }
1737          case CAM_INTF_META_SCALER_CROP_REGION: {
1738             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1739             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1740             int32_t scalerCropRegion[4];
1741             scalerCropRegion[0] = hScalerCropRegion->left;
1742             scalerCropRegion[1] = hScalerCropRegion->top;
1743             scalerCropRegion[2] = hScalerCropRegion->width;
1744             scalerCropRegion[3] = hScalerCropRegion->height;
1745             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1746             break;
1747          }
1748          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1749             int64_t  *sensorExpTime =
1750                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1751             mMetadataResponse.exposure_time = *sensorExpTime;
1752             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1753             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1754             break;
1755          }
1756          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1757             int64_t  *sensorFameDuration =
1758                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1759             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1760             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1761             break;
1762          }
1763          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1764             int32_t  *sensorSensitivity =
1765                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1766             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1767             mMetadataResponse.iso_speed = *sensorSensitivity;
1768             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1769             break;
1770          }
1771          case CAM_INTF_META_SHADING_MODE: {
1772             uint8_t  *shadingMode =
1773                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1774             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1775             break;
1776          }
1777          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1778             uint8_t  *faceDetectMode =
1779                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1780             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1781                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1782                                                        *faceDetectMode);
1783             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1784             break;
1785          }
1786          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1787             uint8_t  *histogramMode =
1788                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1789             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1790             break;
1791          }
1792          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1793               uint8_t  *sharpnessMapMode =
1794                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1795               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1796                                  sharpnessMapMode, 1);
1797               break;
1798           }
1799          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1800               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1801               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1802               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1803                                  (int32_t*)sharpnessMap->sharpness,
1804                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1805               break;
1806          }
1807          case CAM_INTF_META_LENS_SHADING_MAP: {
1808               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1809               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1810               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1811               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1812               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1813                                  (float*)lensShadingMap->lens_shading,
1814                                  4*map_width*map_height);
1815               break;
1816          }
1817          case CAM_INTF_META_TONEMAP_CURVES:{
1818             //Populate CAM_INTF_META_TONEMAP_CURVES
1819             /* ch0 = G, ch 1 = B, ch 2 = R*/
1820             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1821             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1822             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1823                                (float*)tonemap->curves[0].tonemap_points,
1824                                tonemap->tonemap_points_cnt * 2);
1825
1826             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1827                                (float*)tonemap->curves[1].tonemap_points,
1828                                tonemap->tonemap_points_cnt * 2);
1829
1830             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1831                                (float*)tonemap->curves[2].tonemap_points,
1832                                tonemap->tonemap_points_cnt * 2);
1833             break;
1834          }
1835          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1836             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1837             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1838             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1839             break;
1840          }
1841          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1842              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1843              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1844              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1845                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1846              break;
1847          }
1848          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1849             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1850             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1851             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1852                       predColorCorrectionGains->gains, 4);
1853             break;
1854          }
1855          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1856             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1857                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1858             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1859                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1860             break;
1861
1862          }
1863          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
1864             uint8_t *blackLevelLock = (uint8_t*)
1865               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1866             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1867             break;
1868          }
1869          case CAM_INTF_META_SCENE_FLICKER:{
1870             uint8_t *sceneFlicker = (uint8_t*)
1871             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1872             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1873             break;
1874          }
1875          case CAM_INTF_PARM_LED_MODE:
1876             break;
1877          default:
1878             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
1879                   __func__, curr_entry);
1880             break;
1881       }
1882       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
1883       curr_entry = next_entry;
1884    }
1885    resultMetadata = camMetadata.release();
1886    return resultMetadata;
1887}
1888
1889/*===========================================================================
1890 * FUNCTION   : convertToRegions
1891 *
1892 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1893 *
1894 * PARAMETERS :
1895 *   @rect   : cam_rect_t struct to convert
1896 *   @region : int32_t destination array
1897 *   @weight : if we are converting from cam_area_t, weight is valid
1898 *             else weight = -1
1899 *
1900 *==========================================================================*/
1901void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1902    region[0] = rect.left;
1903    region[1] = rect.top;
1904    region[2] = rect.left + rect.width;
1905    region[3] = rect.top + rect.height;
1906    if (weight > -1) {
1907        region[4] = weight;
1908    }
1909}
1910
1911/*===========================================================================
1912 * FUNCTION   : convertFromRegions
1913 *
1914 * DESCRIPTION: helper method to convert from array to cam_rect_t
1915 *
1916 * PARAMETERS :
1917 *   @rect   : cam_rect_t struct to convert
1918 *   @region : int32_t destination array
1919 *   @weight : if we are converting from cam_area_t, weight is valid
1920 *             else weight = -1
1921 *
1922 *==========================================================================*/
1923void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1924                                                   const camera_metadata_t *settings,
1925                                                   uint32_t tag){
1926    CameraMetadata frame_settings;
1927    frame_settings = settings;
1928    int32_t x_min = frame_settings.find(tag).data.i32[0];
1929    int32_t y_min = frame_settings.find(tag).data.i32[1];
1930    int32_t x_max = frame_settings.find(tag).data.i32[2];
1931    int32_t y_max = frame_settings.find(tag).data.i32[3];
1932    roi->weight = frame_settings.find(tag).data.i32[4];
1933    roi->rect.left = x_min;
1934    roi->rect.top = y_min;
1935    roi->rect.width = x_max - x_min;
1936    roi->rect.height = y_max - y_min;
1937}
1938
1939/*===========================================================================
1940 * FUNCTION   : resetIfNeededROI
1941 *
1942 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1943 *              crop region
1944 *
1945 * PARAMETERS :
1946 *   @roi       : cam_area_t struct to resize
1947 *   @scalerCropRegion : cam_crop_region_t region to compare against
1948 *
1949 *
1950 *==========================================================================*/
1951bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1952                                                 const cam_crop_region_t* scalerCropRegion)
1953{
1954    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1955    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1956    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1957    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1958    if ((roi_x_max < scalerCropRegion->left) ||
1959        (roi_y_max < scalerCropRegion->top)  ||
1960        (roi->rect.left > crop_x_max) ||
1961        (roi->rect.top > crop_y_max)){
1962        return false;
1963    }
1964    if (roi->rect.left < scalerCropRegion->left) {
1965        roi->rect.left = scalerCropRegion->left;
1966    }
1967    if (roi->rect.top < scalerCropRegion->top) {
1968        roi->rect.top = scalerCropRegion->top;
1969    }
1970    if (roi_x_max > crop_x_max) {
1971        roi_x_max = crop_x_max;
1972    }
1973    if (roi_y_max > crop_y_max) {
1974        roi_y_max = crop_y_max;
1975    }
1976    roi->rect.width = roi_x_max - roi->rect.left;
1977    roi->rect.height = roi_y_max - roi->rect.top;
1978    return true;
1979}
1980
1981/*===========================================================================
1982 * FUNCTION   : convertLandmarks
1983 *
1984 * DESCRIPTION: helper method to extract the landmarks from face detection info
1985 *
1986 * PARAMETERS :
1987 *   @face   : cam_rect_t struct to convert
1988 *   @landmarks : int32_t destination array
1989 *
1990 *
1991 *==========================================================================*/
1992void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1993{
1994    landmarks[0] = face.left_eye_center.x;
1995    landmarks[1] = face.left_eye_center.y;
1996    landmarks[2] = face.right_eye_center.y;
1997    landmarks[3] = face.right_eye_center.y;
1998    landmarks[4] = face.mouth_center.x;
1999    landmarks[5] = face.mouth_center.y;
2000}
2001
2002#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2003/*===========================================================================
2004 * FUNCTION   : initCapabilities
2005 *
2006 * DESCRIPTION: initialize camera capabilities in static data struct
2007 *
2008 * PARAMETERS :
2009 *   @cameraId  : camera Id
2010 *
2011 * RETURN     : int32_t type of status
2012 *              NO_ERROR  -- success
2013 *              none-zero failure code
2014 *==========================================================================*/
2015int QCamera3HardwareInterface::initCapabilities(int cameraId)
2016{
2017    int rc = 0;
2018    mm_camera_vtbl_t *cameraHandle = NULL;
2019    QCamera3HeapMemory *capabilityHeap = NULL;
2020
2021    cameraHandle = camera_open(cameraId);
2022    if (!cameraHandle) {
2023        ALOGE("%s: camera_open failed", __func__);
2024        rc = -1;
2025        goto open_failed;
2026    }
2027
2028    capabilityHeap = new QCamera3HeapMemory();
2029    if (capabilityHeap == NULL) {
2030        ALOGE("%s: creation of capabilityHeap failed", __func__);
2031        goto heap_creation_failed;
2032    }
2033    /* Allocate memory for capability buffer */
2034    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2035    if(rc != OK) {
2036        ALOGE("%s: No memory for cappability", __func__);
2037        goto allocate_failed;
2038    }
2039
2040    /* Map memory for capability buffer */
2041    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2042    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2043                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2044                                capabilityHeap->getFd(0),
2045                                sizeof(cam_capability_t));
2046    if(rc < 0) {
2047        ALOGE("%s: failed to map capability buffer", __func__);
2048        goto map_failed;
2049    }
2050
2051    /* Query Capability */
2052    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2053    if(rc < 0) {
2054        ALOGE("%s: failed to query capability",__func__);
2055        goto query_failed;
2056    }
2057    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2058    if (!gCamCapability[cameraId]) {
2059        ALOGE("%s: out of memory", __func__);
2060        goto query_failed;
2061    }
2062    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2063                                        sizeof(cam_capability_t));
2064    rc = 0;
2065
2066query_failed:
2067    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2068                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2069map_failed:
2070    capabilityHeap->deallocate();
2071allocate_failed:
2072    delete capabilityHeap;
2073heap_creation_failed:
2074    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2075    cameraHandle = NULL;
2076open_failed:
2077    return rc;
2078}
2079
2080/*===========================================================================
2081 * FUNCTION   : initParameters
2082 *
2083 * DESCRIPTION: initialize camera parameters
2084 *
2085 * PARAMETERS :
2086 *
2087 * RETURN     : int32_t type of status
2088 *              NO_ERROR  -- success
2089 *              none-zero failure code
2090 *==========================================================================*/
2091int QCamera3HardwareInterface::initParameters()
2092{
2093    int rc = 0;
2094
2095    //Allocate Set Param Buffer
2096    mParamHeap = new QCamera3HeapMemory();
2097    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2098    if(rc != OK) {
2099        rc = NO_MEMORY;
2100        ALOGE("Failed to allocate SETPARM Heap memory");
2101        delete mParamHeap;
2102        mParamHeap = NULL;
2103        return rc;
2104    }
2105
2106    //Map memory for parameters buffer
2107    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2108            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2109            mParamHeap->getFd(0),
2110            sizeof(parm_buffer_t));
2111    if(rc < 0) {
2112        ALOGE("%s:failed to map SETPARM buffer",__func__);
2113        rc = FAILED_TRANSACTION;
2114        mParamHeap->deallocate();
2115        delete mParamHeap;
2116        mParamHeap = NULL;
2117        return rc;
2118    }
2119
2120    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2121    return rc;
2122}
2123
2124/*===========================================================================
2125 * FUNCTION   : deinitParameters
2126 *
2127 * DESCRIPTION: de-initialize camera parameters
2128 *
2129 * PARAMETERS :
2130 *
2131 * RETURN     : NONE
2132 *==========================================================================*/
2133void QCamera3HardwareInterface::deinitParameters()
2134{
2135    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2136            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2137
2138    mParamHeap->deallocate();
2139    delete mParamHeap;
2140    mParamHeap = NULL;
2141
2142    mParameters = NULL;
2143}
2144
2145/*===========================================================================
2146 * FUNCTION   : calcMaxJpegSize
2147 *
2148 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2149 *
2150 * PARAMETERS :
2151 *
2152 * RETURN     : max_jpeg_size
2153 *==========================================================================*/
2154int QCamera3HardwareInterface::calcMaxJpegSize()
2155{
2156    int32_t max_jpeg_size = 0;
2157    int temp_width, temp_height;
2158    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2159        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2160        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2161        if (temp_width * temp_height > max_jpeg_size ) {
2162            max_jpeg_size = temp_width * temp_height;
2163        }
2164    }
2165    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2166    return max_jpeg_size;
2167}
2168
2169/*===========================================================================
2170 * FUNCTION   : initStaticMetadata
2171 *
2172 * DESCRIPTION: initialize the static metadata
2173 *
2174 * PARAMETERS :
2175 *   @cameraId  : camera Id
2176 *
2177 * RETURN     : int32_t type of status
2178 *              0  -- success
2179 *              non-zero failure code
2180 *==========================================================================*/
2181int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2182{
2183    int rc = 0;
2184    CameraMetadata staticInfo;
2185
2186    /* android.info: hardware level */
2187    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2188    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2189        &supportedHardwareLevel, 1);
2190
2191    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2192    /*HAL 3 only*/
2193    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2194                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2195
2196    /*hard coded for now but this should come from sensor*/
2197    float min_focus_distance;
2198    if(facingBack){
2199        min_focus_distance = 10;
2200    } else {
2201        min_focus_distance = 0;
2202    }
2203    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2204                    &min_focus_distance, 1);
2205
2206    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2207                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2208
2209    /*should be using focal lengths but sensor doesn't provide that info now*/
2210    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2211                      &gCamCapability[cameraId]->focal_length,
2212                      1);
2213
2214    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2215                      gCamCapability[cameraId]->apertures,
2216                      gCamCapability[cameraId]->apertures_count);
2217
2218    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2219                gCamCapability[cameraId]->filter_densities,
2220                gCamCapability[cameraId]->filter_densities_count);
2221
2222
2223    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2224                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2225                      gCamCapability[cameraId]->optical_stab_modes_count);
2226
2227    staticInfo.update(ANDROID_LENS_POSITION,
2228                      gCamCapability[cameraId]->lens_position,
2229                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2230
2231    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2232                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2233    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2234                      lens_shading_map_size,
2235                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2236
2237    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2238                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2239    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2240            geo_correction_map_size,
2241            sizeof(geo_correction_map_size)/sizeof(int32_t));
2242
2243    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2244                       gCamCapability[cameraId]->geo_correction_map,
2245                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2246
2247    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2248            gCamCapability[cameraId]->sensor_physical_size, 2);
2249
2250    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2251            gCamCapability[cameraId]->exposure_time_range, 2);
2252
2253    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2254            &gCamCapability[cameraId]->max_frame_duration, 1);
2255
2256    camera_metadata_rational baseGainFactor = {
2257            gCamCapability[cameraId]->base_gain_factor.numerator,
2258            gCamCapability[cameraId]->base_gain_factor.denominator};
2259    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
2260                      &baseGainFactor, 1);
2261
2262    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2263                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2264
2265    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2266                                               gCamCapability[cameraId]->pixel_array_size.height};
2267    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2268                      pixel_array_size, 2);
2269
2270    int32_t active_array_size[] = {0, 0,
2271                                                gCamCapability[cameraId]->active_array_size.width,
2272                                                gCamCapability[cameraId]->active_array_size.height};
2273    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2274                      active_array_size, 4);
2275
2276    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2277            &gCamCapability[cameraId]->white_level, 1);
2278
2279    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2280            gCamCapability[cameraId]->black_level_pattern, 4);
2281
2282    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2283                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2284
2285    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2286                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2287
2288    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2289                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2290
2291    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2292                      &gCamCapability[cameraId]->histogram_size, 1);
2293
2294    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2295            &gCamCapability[cameraId]->max_histogram_count, 1);
2296
2297    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2298                                                gCamCapability[cameraId]->sharpness_map_size.height};
2299
2300    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2301            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2302
2303    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2304            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2305
2306
2307    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2308                      &gCamCapability[cameraId]->raw_min_duration,
2309                       1);
2310
2311    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2312                                                HAL_PIXEL_FORMAT_BLOB};
2313    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2314    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2315                      scalar_formats,
2316                      scalar_formats_count);
2317
2318    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2319    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2320              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2321              available_processed_sizes);
2322    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2323                available_processed_sizes,
2324                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2325
2326    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2327                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2328                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2329
2330    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2331    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2332                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2333                 available_fps_ranges);
2334    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2335            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2336
2337    camera_metadata_rational exposureCompensationStep = {
2338            gCamCapability[cameraId]->exp_compensation_step.numerator,
2339            gCamCapability[cameraId]->exp_compensation_step.denominator};
2340    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2341                      &exposureCompensationStep, 1);
2342
2343    /*TO DO*/
2344    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2345    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2346                      availableVstabModes, sizeof(availableVstabModes));
2347
2348    /*HAL 1 and HAL 3 common*/
2349    float maxZoom = 4;
2350    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2351            &maxZoom, 1);
2352
2353    int32_t max3aRegions = 1;
2354    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2355            &max3aRegions, 1);
2356
2357    uint8_t availableFaceDetectModes[] = {
2358            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2359            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2360    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2361                      availableFaceDetectModes,
2362                      sizeof(availableFaceDetectModes));
2363
2364    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2365                                                        gCamCapability[cameraId]->exposure_compensation_max};
2366    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2367            exposureCompensationRange,
2368            sizeof(exposureCompensationRange)/sizeof(int32_t));
2369
2370    uint8_t lensFacing = (facingBack) ?
2371            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2372    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2373
2374    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2375                available_processed_sizes,
2376                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2377
2378    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2379                      available_thumbnail_sizes,
2380                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2381
2382    int32_t max_jpeg_size = 0;
2383    int temp_width, temp_height;
2384    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2385        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2386        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2387        if (temp_width * temp_height > max_jpeg_size ) {
2388            max_jpeg_size = temp_width * temp_height;
2389        }
2390    }
2391    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2392    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2393                      &max_jpeg_size, 1);
2394
2395    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2396    int32_t size = 0;
2397    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2398        int val = lookupFwkName(EFFECT_MODES_MAP,
2399                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2400                                   gCamCapability[cameraId]->supported_effects[i]);
2401        if (val != NAME_NOT_FOUND) {
2402            avail_effects[size] = (uint8_t)val;
2403            size++;
2404        }
2405    }
2406    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2407                      avail_effects,
2408                      size);
2409
2410    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2411    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2412    int32_t supported_scene_modes_cnt = 0;
2413    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2414        int val = lookupFwkName(SCENE_MODES_MAP,
2415                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2416                                gCamCapability[cameraId]->supported_scene_modes[i]);
2417        if (val != NAME_NOT_FOUND) {
2418            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2419            supported_indexes[supported_scene_modes_cnt] = i;
2420            supported_scene_modes_cnt++;
2421        }
2422    }
2423
2424    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2425                      avail_scene_modes,
2426                      supported_scene_modes_cnt);
2427
2428    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2429    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2430                      supported_scene_modes_cnt,
2431                      scene_mode_overrides,
2432                      supported_indexes,
2433                      cameraId);
2434    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2435                      scene_mode_overrides,
2436                      supported_scene_modes_cnt*3);
2437
2438    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2439    size = 0;
2440    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2441        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2442                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2443                                 gCamCapability[cameraId]->supported_antibandings[i]);
2444        if (val != NAME_NOT_FOUND) {
2445            avail_antibanding_modes[size] = (uint8_t)val;
2446            size++;
2447        }
2448
2449    }
2450    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2451                      avail_antibanding_modes,
2452                      size);
2453
2454    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2455    size = 0;
2456    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2457        int val = lookupFwkName(FOCUS_MODES_MAP,
2458                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2459                                gCamCapability[cameraId]->supported_focus_modes[i]);
2460        if (val != NAME_NOT_FOUND) {
2461            avail_af_modes[size] = (uint8_t)val;
2462            size++;
2463        }
2464    }
2465    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2466                      avail_af_modes,
2467                      size);
2468
2469    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2470    size = 0;
2471    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2472        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2473                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2474                                    gCamCapability[cameraId]->supported_white_balances[i]);
2475        if (val != NAME_NOT_FOUND) {
2476            avail_awb_modes[size] = (uint8_t)val;
2477            size++;
2478        }
2479    }
2480    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2481                      avail_awb_modes,
2482                      size);
2483
2484    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2485    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2486      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2487
2488    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2489            available_flash_levels,
2490            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2491
2492
2493    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2494    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2495            &flashAvailable, 1);
2496
2497    uint8_t avail_ae_modes[5];
2498    size = 0;
2499    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2500        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2501        size++;
2502    }
2503    if (flashAvailable) {
2504        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2505        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2506        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2507    }
2508    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2509                      avail_ae_modes,
2510                      size);
2511
2512    int32_t sensitivity_range[2];
2513    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2514    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2515    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2516                      sensitivity_range,
2517                      sizeof(sensitivity_range) / sizeof(int32_t));
2518
2519    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2520                      &gCamCapability[cameraId]->max_analog_sensitivity,
2521                      1);
2522
2523    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2524                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2525                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2526
2527    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2528    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2529                      &sensor_orientation,
2530                      1);
2531
2532    int32_t max_output_streams[3] = {1, 3, 1};
2533    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2534                      max_output_streams,
2535                      3);
2536
2537    gStaticMetadata[cameraId] = staticInfo.release();
2538    return rc;
2539}
2540
2541/*===========================================================================
2542 * FUNCTION   : makeTable
2543 *
2544 * DESCRIPTION: make a table of sizes
2545 *
2546 * PARAMETERS :
2547 *
2548 *
2549 *==========================================================================*/
2550void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2551                                          int32_t* sizeTable)
2552{
2553    int j = 0;
2554    for (int i = 0; i < size; i++) {
2555        sizeTable[j] = dimTable[i].width;
2556        sizeTable[j+1] = dimTable[i].height;
2557        j+=2;
2558    }
2559}
2560
2561/*===========================================================================
2562 * FUNCTION   : makeFPSTable
2563 *
2564 * DESCRIPTION: make a table of fps ranges
2565 *
2566 * PARAMETERS :
2567 *
2568 *==========================================================================*/
2569void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2570                                          int32_t* fpsRangesTable)
2571{
2572    int j = 0;
2573    for (int i = 0; i < size; i++) {
2574        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2575        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2576        j+=2;
2577    }
2578}
2579
2580/*===========================================================================
2581 * FUNCTION   : makeOverridesList
2582 *
2583 * DESCRIPTION: make a list of scene mode overrides
2584 *
2585 * PARAMETERS :
2586 *
2587 *
2588 *==========================================================================*/
2589void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2590                                                  uint8_t size, uint8_t* overridesList,
2591                                                  uint8_t* supported_indexes,
2592                                                  int camera_id)
2593{
2594    /*daemon will give a list of overrides for all scene modes.
2595      However we should send the fwk only the overrides for the scene modes
2596      supported by the framework*/
2597    int j = 0, index = 0, supt = 0;
2598    uint8_t focus_override;
2599    for (int i = 0; i < size; i++) {
2600        supt = 0;
2601        index = supported_indexes[i];
2602        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2603        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2604                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2605                                                    overridesTable[index].awb_mode);
2606        focus_override = (uint8_t)overridesTable[index].af_mode;
2607        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2608           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2609              supt = 1;
2610              break;
2611           }
2612        }
2613        if (supt) {
2614           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2615                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2616                                              focus_override);
2617        } else {
2618           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2619        }
2620        j+=3;
2621    }
2622}
2623
2624/*===========================================================================
2625 * FUNCTION   : getPreviewHalPixelFormat
2626 *
2627 * DESCRIPTION: convert the format to type recognized by framework
2628 *
2629 * PARAMETERS : format : the format from backend
2630 *
2631 ** RETURN    : format recognized by framework
2632 *
2633 *==========================================================================*/
2634int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2635{
2636    int32_t halPixelFormat;
2637
2638    switch (format) {
2639    case CAM_FORMAT_YUV_420_NV12:
2640        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2641        break;
2642    case CAM_FORMAT_YUV_420_NV21:
2643        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2644        break;
2645    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2646        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2647        break;
2648    case CAM_FORMAT_YUV_420_YV12:
2649        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2650        break;
2651    case CAM_FORMAT_YUV_422_NV16:
2652    case CAM_FORMAT_YUV_422_NV61:
2653    default:
2654        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2655        break;
2656    }
2657    return halPixelFormat;
2658}
2659
2660/*===========================================================================
2661 * FUNCTION   : getSensorSensitivity
2662 *
2663 * DESCRIPTION: convert iso_mode to an integer value
2664 *
2665 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2666 *
2667 ** RETURN    : sensitivity supported by sensor
2668 *
2669 *==========================================================================*/
2670int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2671{
2672    int32_t sensitivity;
2673
2674    switch (iso_mode) {
2675    case CAM_ISO_MODE_100:
2676        sensitivity = 100;
2677        break;
2678    case CAM_ISO_MODE_200:
2679        sensitivity = 200;
2680        break;
2681    case CAM_ISO_MODE_400:
2682        sensitivity = 400;
2683        break;
2684    case CAM_ISO_MODE_800:
2685        sensitivity = 800;
2686        break;
2687    case CAM_ISO_MODE_1600:
2688        sensitivity = 1600;
2689        break;
2690    default:
2691        sensitivity = -1;
2692        break;
2693    }
2694    return sensitivity;
2695}
2696
2697
2698/*===========================================================================
2699 * FUNCTION   : AddSetParmEntryToBatch
2700 *
2701 * DESCRIPTION: add set parameter entry into batch
2702 *
2703 * PARAMETERS :
2704 *   @p_table     : ptr to parameter buffer
2705 *   @paramType   : parameter type
2706 *   @paramLength : length of parameter value
2707 *   @paramValue  : ptr to parameter value
2708 *
2709 * RETURN     : int32_t type of status
2710 *              NO_ERROR  -- success
2711 *              none-zero failure code
2712 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    // The batch buffer is an intrusive, ascending singly-linked list of
    // parameter slots keyed by paramType; the GET_/SET_*_PARAM_ID macros
    // read and write the link words inside p_table. This routine links the
    // slot for paramType into the list (if not already present) and then
    // copies the value payload into that slot.
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
    } else if (position < current){
        // New smallest ID: becomes the new list head.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            // Splice the new node between current and its successor.
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Every slot is a fixed-size parm_type_t union; reject oversized payloads
    // instead of corrupting the neighboring slot.
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2754
2755/*===========================================================================
2756 * FUNCTION   : lookupFwkName
2757 *
2758 * DESCRIPTION: In case the enum is not same in fwk and backend
2759 *              make sure the parameter is correctly propogated
2760 *
2761 * PARAMETERS  :
2762 *   @arr      : map between the two enums
2763 *   @len      : len of the map
2764 *   @hal_name : name of the hal_parm to map
2765 *
2766 * RETURN     : int type of status
2767 *              fwk_name  -- success
2768 *              none-zero failure code
2769 *==========================================================================*/
2770int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2771                                             int len, int hal_name)
2772{
2773
2774    for (int i = 0; i < len; i++) {
2775        if (arr[i].hal_name == hal_name)
2776            return arr[i].fwk_name;
2777    }
2778
2779    /* Not able to find matching framework type is not necessarily
2780     * an error case. This happens when mm-camera supports more attributes
2781     * than the frameworks do */
2782    ALOGD("%s: Cannot find matching framework type", __func__);
2783    return NAME_NOT_FOUND;
2784}
2785
2786/*===========================================================================
2787 * FUNCTION   : lookupHalName
2788 *
2789 * DESCRIPTION: In case the enum is not same in fwk and backend
2790 *              make sure the parameter is correctly propogated
2791 *
2792 * PARAMETERS  :
2793 *   @arr      : map between the two enums
2794 *   @len      : len of the map
2795 *   @fwk_name : name of the hal_parm to map
2796 *
2797 * RETURN     : int32_t type of status
2798 *              hal_name  -- success
2799 *              none-zero failure code
2800 *==========================================================================*/
2801int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2802                                             int len, int fwk_name)
2803{
2804    for (int i = 0; i < len; i++) {
2805       if (arr[i].fwk_name == fwk_name)
2806           return arr[i].hal_name;
2807    }
2808    ALOGE("%s: Cannot find matching hal type", __func__);
2809    return NAME_NOT_FOUND;
2810}
2811
2812/*===========================================================================
2813 * FUNCTION   : getCapabilities
2814 *
2815 * DESCRIPTION: query camera capabilities
2816 *
2817 * PARAMETERS :
2818 *   @cameraId  : camera Id
2819 *   @info      : camera info struct to be filled in with camera capabilities
2820 *
2821 * RETURN     : int32_t type of status
2822 *              NO_ERROR  -- success
2823 *              none-zero failure code
2824 *==========================================================================*/
2825int QCamera3HardwareInterface::getCamInfo(int cameraId,
2826                                    struct camera_info *info)
2827{
2828    int rc = 0;
2829
2830    if (NULL == gCamCapability[cameraId]) {
2831        rc = initCapabilities(cameraId);
2832        if (rc < 0) {
2833            //pthread_mutex_unlock(&g_camlock);
2834            return rc;
2835        }
2836    }
2837
2838    if (NULL == gStaticMetadata[cameraId]) {
2839        rc = initStaticMetadata(cameraId);
2840        if (rc < 0) {
2841            return rc;
2842        }
2843    }
2844
2845    switch(gCamCapability[cameraId]->position) {
2846    case CAM_POSITION_BACK:
2847        info->facing = CAMERA_FACING_BACK;
2848        break;
2849
2850    case CAM_POSITION_FRONT:
2851        info->facing = CAMERA_FACING_FRONT;
2852        break;
2853
2854    default:
2855        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2856        rc = -1;
2857        break;
2858    }
2859
2860
2861    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2862    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2863    info->static_camera_characteristics = gStaticMetadata[cameraId];
2864
2865    return rc;
2866}
2867
2868/*===========================================================================
2869 * FUNCTION   : translateMetadata
2870 *
2871 * DESCRIPTION: translate the metadata into camera_metadata_t
2872 *
2873 * PARAMETERS : type of the request
2874 *
2875 *
2876 * RETURN     : success: camera_metadata_t*
2877 *              failure: NULL
2878 *
2879 *==========================================================================*/
2880camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2881{
2882    pthread_mutex_lock(&mMutex);
2883
2884    if (mDefaultMetadata[type] != NULL) {
2885        pthread_mutex_unlock(&mMutex);
2886        return mDefaultMetadata[type];
2887    }
2888    //first time we are handling this request
2889    //fill up the metadata structure using the wrapper class
2890    CameraMetadata settings;
2891    //translate from cam_capability_t to camera_metadata_tag_t
2892    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2893    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2894    int32_t defaultRequestID = 0;
2895    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
2896
2897    /*control*/
2898
2899    uint8_t controlIntent = 0;
2900    switch (type) {
2901      case CAMERA3_TEMPLATE_PREVIEW:
2902        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2903        break;
2904      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2905        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2906        break;
2907      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2908        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2909        break;
2910      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2911        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2912        break;
2913      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2914        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2915        break;
2916      default:
2917        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2918        break;
2919    }
2920    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2921
2922    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2923            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2924
2925    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2926    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2927
2928    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2929    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2930
2931    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2932    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2933
2934    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2935    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2936
2937    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2938    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2939
2940    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2941    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2942
2943    static uint8_t focusMode;
2944    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2945        ALOGE("%s: Setting focus mode to auto", __func__);
2946        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2947    } else {
2948        ALOGE("%s: Setting focus mode to off", __func__);
2949        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2950    }
2951    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2952
2953    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2954    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2955
2956    /*flash*/
2957    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2958    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2959
2960    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2961    settings.update(ANDROID_FLASH_FIRING_POWER,
2962            &flashFiringLevel, 1);
2963
2964    /* lens */
2965    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2966    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2967
2968    if (gCamCapability[mCameraId]->filter_densities_count) {
2969        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2970        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2971                        gCamCapability[mCameraId]->filter_densities_count);
2972    }
2973
2974    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2975    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2976
2977    /* Exposure time(Update the Min Exposure Time)*/
2978    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
2979    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
2980
2981    /* frame duration */
2982    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2983    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2984
2985    /* sensitivity */
2986    static const int32_t default_sensitivity = 100;
2987    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2988
2989    /*edge mode*/
2990    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2991    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
2992
2993    /*noise reduction mode*/
2994    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
2995    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
2996
2997    /*color correction mode*/
2998    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
2999    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3000
3001    /*transform matrix mode*/
3002    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3003    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3004
3005    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3006    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3007
3008    mDefaultMetadata[type] = settings.release();
3009
3010    pthread_mutex_unlock(&mMutex);
3011    return mDefaultMetadata[type];
3012}
3013
3014/*===========================================================================
3015 * FUNCTION   : setFrameParameters
3016 *
3017 * DESCRIPTION: set parameters per frame as requested in the metadata from
3018 *              framework
3019 *
3020 * PARAMETERS :
3021 *   @request   : request that needs to be serviced
3022 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3023 *
3024 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
3026 *==========================================================================*/
3027int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3028                    uint32_t streamTypeMask)
3029{
3030    /*translate from camera_metadata_t type to parm_type_t*/
3031    int rc = 0;
3032    if (request->settings == NULL && mFirstRequest) {
3033        /*settings cannot be null for the first request*/
3034        return BAD_VALUE;
3035    }
3036
3037    int32_t hal_version = CAM_HAL_V3;
3038
3039    memset(mParameters, 0, sizeof(parm_buffer_t));
3040    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3041    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3042                sizeof(hal_version), &hal_version);
3043    if (rc < 0) {
3044        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3045        return BAD_VALUE;
3046    }
3047
3048    /*we need to update the frame number in the parameters*/
3049    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3050                                sizeof(request->frame_number), &(request->frame_number));
3051    if (rc < 0) {
3052        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3053        return BAD_VALUE;
3054    }
3055
3056    /* Update stream id mask where buffers are requested */
3057    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3058                                sizeof(streamTypeMask), &streamTypeMask);
3059    if (rc < 0) {
3060        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3061        return BAD_VALUE;
3062    }
3063
3064    if(request->settings != NULL){
3065        rc = translateMetadataToParameters(request);
3066    }
3067    /*set the parameters to backend*/
3068    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3069    return rc;
3070}
3071
3072/*===========================================================================
3073 * FUNCTION   : translateMetadataToParameters
3074 *
3075 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3076 *
3077 *
3078 * PARAMETERS :
3079 *   @request  : request sent from framework
3080 *
3081 *
3082 * RETURN     : success: NO_ERROR
 *              failure: error code (< 0) from AddSetParmEntryToBatch
3084 *==========================================================================*/
3085int QCamera3HardwareInterface::translateMetadataToParameters
3086                                  (const camera3_capture_request_t *request)
3087{
3088    int rc = 0;
3089    CameraMetadata frame_settings;
3090    frame_settings = request->settings;
3091
3092    /* Do not change the order of the following list unless you know what you are
3093     * doing.
3094     * The order is laid out in such a way that parameters in the front of the table
3095     * may be used to override the parameters later in the table. Examples are:
3096     * 1. META_MODE should precede AEC/AWB/AF MODE
3097     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3098     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3099     * 4. Any mode should precede it's corresponding settings
3100     */
3101    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3102        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3103        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3104                sizeof(metaMode), &metaMode);
3105        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3106           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3107           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3108                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3109                                             fwk_sceneMode);
3110           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3111                sizeof(sceneMode), &sceneMode);
3112        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3113           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3114           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3115                sizeof(sceneMode), &sceneMode);
3116        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3117           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3118           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3119                sizeof(sceneMode), &sceneMode);
3120        }
3121    }
3122
3123    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3124        uint8_t fwk_aeMode =
3125            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3126        uint8_t aeMode;
3127        int32_t redeye;
3128
3129        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3130            aeMode = CAM_AE_MODE_OFF;
3131        } else {
3132            aeMode = CAM_AE_MODE_ON;
3133        }
3134        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3135            redeye = 1;
3136        } else {
3137            redeye = 0;
3138        }
3139
3140        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3141                                          sizeof(AE_FLASH_MODE_MAP),
3142                                          fwk_aeMode);
3143        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3144                sizeof(aeMode), &aeMode);
3145        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3146                sizeof(flashMode), &flashMode);
3147        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3148                sizeof(redeye), &redeye);
3149    }
3150
3151    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3152        uint8_t fwk_whiteLevel =
3153            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3154        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3155                sizeof(WHITE_BALANCE_MODES_MAP),
3156                fwk_whiteLevel);
3157        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3158                sizeof(whiteLevel), &whiteLevel);
3159    }
3160
3161    float focalDistance = -1.0;
3162    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3163        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3164        rc = AddSetParmEntryToBatch(mParameters,
3165                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3166                sizeof(focalDistance), &focalDistance);
3167    }
3168
3169    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3170        uint8_t fwk_focusMode =
3171            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3172        uint8_t focusMode;
3173        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3174            focusMode = CAM_FOCUS_MODE_INFINITY;
3175        } else{
3176         focusMode = lookupHalName(FOCUS_MODES_MAP,
3177                                   sizeof(FOCUS_MODES_MAP),
3178                                   fwk_focusMode);
3179        }
3180        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3181                sizeof(focusMode), &focusMode);
3182    }
3183
3184    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3185        int32_t antibandingMode =
3186            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3187        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3188                sizeof(antibandingMode), &antibandingMode);
3189    }
3190
3191    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3192        int32_t expCompensation = frame_settings.find(
3193            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3194        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3195            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3196        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3197            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3198        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3199          sizeof(expCompensation), &expCompensation);
3200    }
3201
3202    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3203        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3204        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3205                sizeof(aeLock), &aeLock);
3206    }
3207    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3208        cam_fps_range_t fps_range;
3209        fps_range.min_fps =
3210            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3211        fps_range.max_fps =
3212            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3213        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3214                sizeof(fps_range), &fps_range);
3215    }
3216
3217    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3218        uint8_t awbLock =
3219            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3220        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3221                sizeof(awbLock), &awbLock);
3222    }
3223
3224    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3225        uint8_t fwk_effectMode =
3226            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3227        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3228                sizeof(EFFECT_MODES_MAP),
3229                fwk_effectMode);
3230        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3231                sizeof(effectMode), &effectMode);
3232    }
3233
3234    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3235        uint8_t colorCorrectMode =
3236            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3237        rc =
3238            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3239                    sizeof(colorCorrectMode), &colorCorrectMode);
3240    }
3241
3242    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3243        cam_color_correct_gains_t colorCorrectGains;
3244        for (int i = 0; i < 4; i++) {
3245            colorCorrectGains.gains[i] =
3246                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3247        }
3248        rc =
3249            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3250                    sizeof(colorCorrectGains), &colorCorrectGains);
3251    }
3252
3253    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3254        cam_color_correct_matrix_t colorCorrectTransform;
3255        cam_rational_type_t transform_elem;
3256        int num = 0;
3257        for (int i = 0; i < 3; i++) {
3258           for (int j = 0; j < 3; j++) {
3259              transform_elem.numerator =
3260                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3261              transform_elem.denominator =
3262                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3263              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3264              num++;
3265           }
3266        }
3267        rc =
3268            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3269                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3270    }
3271
3272    cam_trigger_t aecTrigger;
3273    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3274    aecTrigger.trigger_id = -1;
3275    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3276        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3277        aecTrigger.trigger =
3278            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3279        aecTrigger.trigger_id =
3280            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3281    }
3282    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3283                                sizeof(aecTrigger), &aecTrigger);
3284
3285    /*af_trigger must come with a trigger id*/
3286    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3287        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3288        cam_trigger_t af_trigger;
3289        af_trigger.trigger =
3290            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3291        af_trigger.trigger_id =
3292            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3293        rc = AddSetParmEntryToBatch(mParameters,
3294                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3295    }
3296
3297    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3298        int32_t demosaic =
3299            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3300        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3301                sizeof(demosaic), &demosaic);
3302    }
3303
3304    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3305        cam_edge_application_t edge_application;
3306        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3307        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3308            edge_application.sharpness = 0;
3309        } else {
3310            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3311                int32_t edgeStrength =
3312                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3313                edge_application.sharpness = edgeStrength;
3314            } else {
3315                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3316            }
3317        }
3318        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3319                sizeof(edge_application), &edge_application);
3320    }
3321
3322    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3323        int32_t respectFlashMode = 1;
3324        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3325            uint8_t fwk_aeMode =
3326                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3327            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3328                respectFlashMode = 0;
3329                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3330                    __func__);
3331            }
3332        }
3333        if (respectFlashMode) {
3334            uint8_t flashMode =
3335                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3336            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3337                                          sizeof(FLASH_MODES_MAP),
3338                                          flashMode);
3339            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3340            // To check: CAM_INTF_META_FLASH_MODE usage
3341            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3342                          sizeof(flashMode), &flashMode);
3343        }
3344    }
3345
3346    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3347        uint8_t flashPower =
3348            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3349        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3350                sizeof(flashPower), &flashPower);
3351    }
3352
3353    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3354        int64_t flashFiringTime =
3355            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3356        rc = AddSetParmEntryToBatch(mParameters,
3357                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3358    }
3359
3360    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3361        uint8_t geometricMode =
3362            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3363        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3364                sizeof(geometricMode), &geometricMode);
3365    }
3366
3367    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3368        uint8_t geometricStrength =
3369            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3370        rc = AddSetParmEntryToBatch(mParameters,
3371                CAM_INTF_META_GEOMETRIC_STRENGTH,
3372                sizeof(geometricStrength), &geometricStrength);
3373    }
3374
3375    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3376        uint8_t hotPixelMode =
3377            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3378        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3379                sizeof(hotPixelMode), &hotPixelMode);
3380    }
3381
3382    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3383        float lensAperture =
3384            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3385        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3386                sizeof(lensAperture), &lensAperture);
3387    }
3388
3389    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3390        float filterDensity =
3391            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3392        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3393                sizeof(filterDensity), &filterDensity);
3394    }
3395
3396    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3397        float focalLength =
3398            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3399        rc = AddSetParmEntryToBatch(mParameters,
3400                CAM_INTF_META_LENS_FOCAL_LENGTH,
3401                sizeof(focalLength), &focalLength);
3402    }
3403
3404    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3405        uint8_t optStabMode =
3406            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3407        rc = AddSetParmEntryToBatch(mParameters,
3408                CAM_INTF_META_LENS_OPT_STAB_MODE,
3409                sizeof(optStabMode), &optStabMode);
3410    }
3411
3412    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3413        uint8_t noiseRedMode =
3414            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3415        rc = AddSetParmEntryToBatch(mParameters,
3416                CAM_INTF_META_NOISE_REDUCTION_MODE,
3417                sizeof(noiseRedMode), &noiseRedMode);
3418    }
3419
3420    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3421        uint8_t noiseRedStrength =
3422            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3423        rc = AddSetParmEntryToBatch(mParameters,
3424                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3425                sizeof(noiseRedStrength), &noiseRedStrength);
3426    }
3427
3428    cam_crop_region_t scalerCropRegion;
3429    bool scalerCropSet = false;
3430    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3431        scalerCropRegion.left =
3432            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3433        scalerCropRegion.top =
3434            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3435        scalerCropRegion.width =
3436            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3437        scalerCropRegion.height =
3438            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3439        rc = AddSetParmEntryToBatch(mParameters,
3440                CAM_INTF_META_SCALER_CROP_REGION,
3441                sizeof(scalerCropRegion), &scalerCropRegion);
3442        scalerCropSet = true;
3443    }
3444
3445    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3446        int64_t sensorExpTime =
3447            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3448        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3449        rc = AddSetParmEntryToBatch(mParameters,
3450                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3451                sizeof(sensorExpTime), &sensorExpTime);
3452    }
3453
3454    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3455        int64_t sensorFrameDuration =
3456            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3457        int64_t minFrameDuration = getMinFrameDuration(request);
3458        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3459        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3460            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3461        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3462        rc = AddSetParmEntryToBatch(mParameters,
3463                CAM_INTF_META_SENSOR_FRAME_DURATION,
3464                sizeof(sensorFrameDuration), &sensorFrameDuration);
3465    }
3466
3467    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3468        int32_t sensorSensitivity =
3469            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3470        if (sensorSensitivity <
3471                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3472            sensorSensitivity =
3473                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3474        if (sensorSensitivity >
3475                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3476            sensorSensitivity =
3477                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3478        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3479        rc = AddSetParmEntryToBatch(mParameters,
3480                CAM_INTF_META_SENSOR_SENSITIVITY,
3481                sizeof(sensorSensitivity), &sensorSensitivity);
3482    }
3483
3484    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3485        int32_t shadingMode =
3486            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3487        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3488                sizeof(shadingMode), &shadingMode);
3489    }
3490
3491    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3492        uint8_t shadingStrength =
3493            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3494        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3495                sizeof(shadingStrength), &shadingStrength);
3496    }
3497
3498    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3499        uint8_t fwk_facedetectMode =
3500            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3501        uint8_t facedetectMode =
3502            lookupHalName(FACEDETECT_MODES_MAP,
3503                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3504        rc = AddSetParmEntryToBatch(mParameters,
3505                CAM_INTF_META_STATS_FACEDETECT_MODE,
3506                sizeof(facedetectMode), &facedetectMode);
3507    }
3508
3509    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3510        uint8_t histogramMode =
3511            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3512        rc = AddSetParmEntryToBatch(mParameters,
3513                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3514                sizeof(histogramMode), &histogramMode);
3515    }
3516
3517    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3518        uint8_t sharpnessMapMode =
3519            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3520        rc = AddSetParmEntryToBatch(mParameters,
3521                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3522                sizeof(sharpnessMapMode), &sharpnessMapMode);
3523    }
3524
3525    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3526        uint8_t tonemapMode =
3527            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3528        rc = AddSetParmEntryToBatch(mParameters,
3529                CAM_INTF_META_TONEMAP_MODE,
3530                sizeof(tonemapMode), &tonemapMode);
3531    }
3532    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3533    /*All tonemap channels will have the same number of points*/
3534    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3535        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3536        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3537        cam_rgb_tonemap_curves tonemapCurves;
3538        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3539
3540        /* ch0 = G*/
3541        int point = 0;
3542        cam_tonemap_curve_t tonemapCurveGreen;
3543        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3544            for (int j = 0; j < 2; j++) {
3545               tonemapCurveGreen.tonemap_points[i][j] =
3546                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3547               point++;
3548            }
3549        }
3550        tonemapCurves.curves[0] = tonemapCurveGreen;
3551
3552        /* ch 1 = B */
3553        point = 0;
3554        cam_tonemap_curve_t tonemapCurveBlue;
3555        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3556            for (int j = 0; j < 2; j++) {
3557               tonemapCurveBlue.tonemap_points[i][j] =
3558                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3559               point++;
3560            }
3561        }
3562        tonemapCurves.curves[1] = tonemapCurveBlue;
3563
3564        /* ch 2 = R */
3565        point = 0;
3566        cam_tonemap_curve_t tonemapCurveRed;
3567        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3568            for (int j = 0; j < 2; j++) {
3569               tonemapCurveRed.tonemap_points[i][j] =
3570                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3571               point++;
3572            }
3573        }
3574        tonemapCurves.curves[2] = tonemapCurveRed;
3575
3576        rc = AddSetParmEntryToBatch(mParameters,
3577                CAM_INTF_META_TONEMAP_CURVES,
3578                sizeof(tonemapCurves), &tonemapCurves);
3579    }
3580
3581    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3582        uint8_t captureIntent =
3583            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3584        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3585                sizeof(captureIntent), &captureIntent);
3586    }
3587
3588    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3589        uint8_t blackLevelLock =
3590            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3591        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3592                sizeof(blackLevelLock), &blackLevelLock);
3593    }
3594
3595    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3596        uint8_t lensShadingMapMode =
3597            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3598        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3599                sizeof(lensShadingMapMode), &lensShadingMapMode);
3600    }
3601
3602    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3603        cam_area_t roi;
3604        bool reset = true;
3605        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3606        if (scalerCropSet) {
3607            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3608        }
3609        if (reset) {
3610            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3611                    sizeof(roi), &roi);
3612        }
3613    }
3614
3615    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3616        cam_area_t roi;
3617        bool reset = true;
3618        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3619        if (scalerCropSet) {
3620            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3621        }
3622        if (reset) {
3623            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3624                    sizeof(roi), &roi);
3625        }
3626    }
3627
3628    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3629        cam_area_t roi;
3630        bool reset = true;
3631        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3632        if (scalerCropSet) {
3633            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3634        }
3635        if (reset) {
3636            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3637                    sizeof(roi), &roi);
3638        }
3639    }
3640    return rc;
3641}
3642
3643/*===========================================================================
3644 * FUNCTION   : getJpegSettings
3645 *
3646 * DESCRIPTION: save the jpeg settings in the HAL
3647 *
3648 *
3649 * PARAMETERS :
3650 *   @settings  : frame settings information from framework
3651 *
3652 *
3653 * RETURN     : success: NO_ERROR
3654 *              failure:
3655 *==========================================================================*/
3656int QCamera3HardwareInterface::getJpegSettings
3657                                  (const camera_metadata_t *settings)
3658{
3659    if (mJpegSettings) {
3660        if (mJpegSettings->gps_timestamp) {
3661            free(mJpegSettings->gps_timestamp);
3662            mJpegSettings->gps_timestamp = NULL;
3663        }
3664        if (mJpegSettings->gps_coordinates) {
3665            for (int i = 0; i < 3; i++) {
3666                free(mJpegSettings->gps_coordinates[i]);
3667                mJpegSettings->gps_coordinates[i] = NULL;
3668            }
3669        }
3670        free(mJpegSettings);
3671        mJpegSettings = NULL;
3672    }
3673    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3674    CameraMetadata jpeg_settings;
3675    jpeg_settings = settings;
3676
3677    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3678        mJpegSettings->jpeg_orientation =
3679            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3680    } else {
3681        mJpegSettings->jpeg_orientation = 0;
3682    }
3683    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3684        mJpegSettings->jpeg_quality =
3685            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3686    } else {
3687        mJpegSettings->jpeg_quality = 85;
3688    }
3689    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3690        mJpegSettings->thumbnail_size.width =
3691            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3692        mJpegSettings->thumbnail_size.height =
3693            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3694    } else {
3695        mJpegSettings->thumbnail_size.width = 0;
3696        mJpegSettings->thumbnail_size.height = 0;
3697    }
3698    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3699        for (int i = 0; i < 3; i++) {
3700            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3701            *(mJpegSettings->gps_coordinates[i]) =
3702                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3703        }
3704    } else{
3705       for (int i = 0; i < 3; i++) {
3706            mJpegSettings->gps_coordinates[i] = NULL;
3707        }
3708    }
3709
3710    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3711        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3712        *(mJpegSettings->gps_timestamp) =
3713            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3714    } else {
3715        mJpegSettings->gps_timestamp = NULL;
3716    }
3717
3718    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3719        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3720        for (int i = 0; i < len; i++) {
3721            mJpegSettings->gps_processing_method[i] =
3722                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3723        }
3724        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3725            mJpegSettings->gps_processing_method[len] = '\0';
3726        }
3727    } else {
3728        mJpegSettings->gps_processing_method[0] = '\0';
3729    }
3730
3731    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3732        mJpegSettings->sensor_sensitivity =
3733            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3734    } else {
3735        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3736    }
3737
3738    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3739
3740    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3741        mJpegSettings->lens_focal_length =
3742            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3743    }
3744    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3745        mJpegSettings->exposure_compensation =
3746            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3747    }
3748    mJpegSettings->sharpness = 10; //default value
3749    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3750        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3751        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3752            mJpegSettings->sharpness = 0;
3753        }
3754    }
3755    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3756    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3757    mJpegSettings->is_jpeg_format = true;
3758    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3759    return 0;
3760}
3761
3762/*===========================================================================
3763 * FUNCTION   : captureResultCb
3764 *
3765 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3766 *
3767 * PARAMETERS :
3768 *   @frame  : frame information from mm-camera-interface
3769 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3770 *   @userdata: userdata
3771 *
3772 * RETURN     : NONE
3773 *==========================================================================*/
3774void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3775                camera3_stream_buffer_t *buffer,
3776                uint32_t frame_number, void *userdata)
3777{
3778    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3779    if (hw == NULL) {
3780        ALOGE("%s: Invalid hw %p", __func__, hw);
3781        return;
3782    }
3783
3784    hw->captureResultCb(metadata, buffer, frame_number);
3785    return;
3786}
3787
3788
3789/*===========================================================================
3790 * FUNCTION   : initialize
3791 *
3792 * DESCRIPTION: Pass framework callback pointers to HAL
3793 *
3794 * PARAMETERS :
3795 *
3796 *
3797 * RETURN     : Success : 0
3798 *              Failure: -ENODEV
3799 *==========================================================================*/
3800
3801int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3802                                  const camera3_callback_ops_t *callback_ops)
3803{
3804    ALOGV("%s: E", __func__);
3805    QCamera3HardwareInterface *hw =
3806        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3807    if (!hw) {
3808        ALOGE("%s: NULL camera device", __func__);
3809        return -ENODEV;
3810    }
3811
3812    int rc = hw->initialize(callback_ops);
3813    ALOGV("%s: X", __func__);
3814    return rc;
3815}
3816
3817/*===========================================================================
3818 * FUNCTION   : configure_streams
3819 *
3820 * DESCRIPTION:
3821 *
3822 * PARAMETERS :
3823 *
3824 *
3825 * RETURN     : Success: 0
3826 *              Failure: -EINVAL (if stream configuration is invalid)
3827 *                       -ENODEV (fatal error)
3828 *==========================================================================*/
3829
3830int QCamera3HardwareInterface::configure_streams(
3831        const struct camera3_device *device,
3832        camera3_stream_configuration_t *stream_list)
3833{
3834    ALOGV("%s: E", __func__);
3835    QCamera3HardwareInterface *hw =
3836        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3837    if (!hw) {
3838        ALOGE("%s: NULL camera device", __func__);
3839        return -ENODEV;
3840    }
3841    int rc = hw->configureStreams(stream_list);
3842    ALOGV("%s: X", __func__);
3843    return rc;
3844}
3845
3846/*===========================================================================
3847 * FUNCTION   : register_stream_buffers
3848 *
3849 * DESCRIPTION: Register stream buffers with the device
3850 *
3851 * PARAMETERS :
3852 *
3853 * RETURN     :
3854 *==========================================================================*/
3855int QCamera3HardwareInterface::register_stream_buffers(
3856        const struct camera3_device *device,
3857        const camera3_stream_buffer_set_t *buffer_set)
3858{
3859    ALOGV("%s: E", __func__);
3860    QCamera3HardwareInterface *hw =
3861        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3862    if (!hw) {
3863        ALOGE("%s: NULL camera device", __func__);
3864        return -ENODEV;
3865    }
3866    int rc = hw->registerStreamBuffers(buffer_set);
3867    ALOGV("%s: X", __func__);
3868    return rc;
3869}
3870
3871/*===========================================================================
3872 * FUNCTION   : construct_default_request_settings
3873 *
3874 * DESCRIPTION: Configure a settings buffer to meet the required use case
3875 *
3876 * PARAMETERS :
3877 *
3878 *
3879 * RETURN     : Success: Return valid metadata
3880 *              Failure: Return NULL
3881 *==========================================================================*/
3882const camera_metadata_t* QCamera3HardwareInterface::
3883    construct_default_request_settings(const struct camera3_device *device,
3884                                        int type)
3885{
3886
3887    ALOGV("%s: E", __func__);
3888    camera_metadata_t* fwk_metadata = NULL;
3889    QCamera3HardwareInterface *hw =
3890        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3891    if (!hw) {
3892        ALOGE("%s: NULL camera device", __func__);
3893        return NULL;
3894    }
3895
3896    fwk_metadata = hw->translateCapabilityToMetadata(type);
3897
3898    ALOGV("%s: X", __func__);
3899    return fwk_metadata;
3900}
3901
3902/*===========================================================================
3903 * FUNCTION   : process_capture_request
3904 *
3905 * DESCRIPTION:
3906 *
3907 * PARAMETERS :
3908 *
3909 *
3910 * RETURN     :
3911 *==========================================================================*/
3912int QCamera3HardwareInterface::process_capture_request(
3913                    const struct camera3_device *device,
3914                    camera3_capture_request_t *request)
3915{
3916    ALOGV("%s: E", __func__);
3917    QCamera3HardwareInterface *hw =
3918        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3919    if (!hw) {
3920        ALOGE("%s: NULL camera device", __func__);
3921        return -EINVAL;
3922    }
3923
3924    int rc = hw->processCaptureRequest(request);
3925    ALOGV("%s: X", __func__);
3926    return rc;
3927}
3928
3929/*===========================================================================
3930 * FUNCTION   : get_metadata_vendor_tag_ops
3931 *
3932 * DESCRIPTION:
3933 *
3934 * PARAMETERS :
3935 *
3936 *
3937 * RETURN     :
3938 *==========================================================================*/
3939
3940void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3941                const struct camera3_device *device,
3942                vendor_tag_query_ops_t* ops)
3943{
3944    ALOGV("%s: E", __func__);
3945    QCamera3HardwareInterface *hw =
3946        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3947    if (!hw) {
3948        ALOGE("%s: NULL camera device", __func__);
3949        return;
3950    }
3951
3952    hw->getMetadataVendorTagOps(ops);
3953    ALOGV("%s: X", __func__);
3954    return;
3955}
3956
3957/*===========================================================================
3958 * FUNCTION   : dump
3959 *
3960 * DESCRIPTION:
3961 *
3962 * PARAMETERS :
3963 *
3964 *
3965 * RETURN     :
3966 *==========================================================================*/
3967
3968void QCamera3HardwareInterface::dump(
3969                const struct camera3_device *device, int fd)
3970{
3971    ALOGV("%s: E", __func__);
3972    QCamera3HardwareInterface *hw =
3973        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3974    if (!hw) {
3975        ALOGE("%s: NULL camera device", __func__);
3976        return;
3977    }
3978
3979    hw->dump(fd);
3980    ALOGV("%s: X", __func__);
3981    return;
3982}
3983
3984/*===========================================================================
3985 * FUNCTION   : flush
3986 *
3987 * DESCRIPTION:
3988 *
3989 * PARAMETERS :
3990 *
3991 *
3992 * RETURN     :
3993 *==========================================================================*/
3994
3995int QCamera3HardwareInterface::flush(
3996                const struct camera3_device *device)
3997{
3998    int rc;
3999    ALOGV("%s: E", __func__);
4000    QCamera3HardwareInterface *hw =
4001        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4002    if (!hw) {
4003        ALOGE("%s: NULL camera device", __func__);
4004        return -EINVAL;
4005    }
4006
4007    rc = hw->flush();
4008    ALOGV("%s: X", __func__);
4009    return rc;
4010}
4011
4012/*===========================================================================
4013 * FUNCTION   : close_camera_device
4014 *
4015 * DESCRIPTION:
4016 *
4017 * PARAMETERS :
4018 *
4019 *
4020 * RETURN     :
4021 *==========================================================================*/
4022int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4023{
4024    ALOGV("%s: E", __func__);
4025    int ret = NO_ERROR;
4026    QCamera3HardwareInterface *hw =
4027        reinterpret_cast<QCamera3HardwareInterface *>(
4028            reinterpret_cast<camera3_device_t *>(device)->priv);
4029    if (!hw) {
4030        ALOGE("NULL camera device");
4031        return BAD_VALUE;
4032    }
4033    delete hw;
4034
4035    pthread_mutex_lock(&mCameraSessionLock);
4036    mCameraSessionActive = 0;
4037    pthread_mutex_unlock(&mCameraSessionLock);
4038    ALOGV("%s: X", __func__);
4039    return ret;
4040}
4041
4042/*===========================================================================
4043 * FUNCTION   : getWaveletDenoiseProcessPlate
4044 *
4045 * DESCRIPTION: query wavelet denoise process plate
4046 *
4047 * PARAMETERS : None
4048 *
4049 * RETURN     : WNR prcocess plate vlaue
4050 *==========================================================================*/
4051cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4052{
4053    char prop[PROPERTY_VALUE_MAX];
4054    memset(prop, 0, sizeof(prop));
4055    property_get("persist.denoise.process.plates", prop, "0");
4056    int processPlate = atoi(prop);
4057    switch(processPlate) {
4058    case 0:
4059        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4060    case 1:
4061        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4062    case 2:
4063        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4064    case 3:
4065        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4066    default:
4067        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4068    }
4069}
4070
4071/*===========================================================================
4072 * FUNCTION   : needRotationReprocess
4073 *
4074 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4075 *
4076 * PARAMETERS : none
4077 *
4078 * RETURN     : true: needed
4079 *              false: no need
4080 *==========================================================================*/
4081bool QCamera3HardwareInterface::needRotationReprocess()
4082{
4083
4084    if (!mJpegSettings->is_jpeg_format) {
4085        // RAW image, no need to reprocess
4086        return false;
4087    }
4088
4089    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4090        mJpegSettings->jpeg_orientation > 0) {
4091        // current rotation is not zero, and pp has the capability to process rotation
4092        ALOGD("%s: need do reprocess for rotation", __func__);
4093        return true;
4094    }
4095
4096    return false;
4097}
4098
4099/*===========================================================================
4100 * FUNCTION   : needReprocess
4101 *
4102 * DESCRIPTION: if reprocess in needed
4103 *
4104 * PARAMETERS : none
4105 *
4106 * RETURN     : true: needed
4107 *              false: no need
4108 *==========================================================================*/
4109bool QCamera3HardwareInterface::needReprocess()
4110{
4111    if (!mJpegSettings->is_jpeg_format) {
4112        // RAW image, no need to reprocess
4113        return false;
4114    }
4115
4116    if ((mJpegSettings->min_required_pp_mask > 0) ||
4117         isWNREnabled()) {
4118        // TODO: add for ZSL HDR later
4119        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4120        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4121        return true;
4122    }
4123    return needRotationReprocess();
4124}
4125
4126/*===========================================================================
4127 * FUNCTION   : addOnlineReprocChannel
4128 *
4129 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4130 *              coming from input channel
4131 *
4132 * PARAMETERS :
4133 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4134 *
4135 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4136 *==========================================================================*/
4137QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4138              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4139{
4140    int32_t rc = NO_ERROR;
4141    QCamera3ReprocessChannel *pChannel = NULL;
4142    if (pInputChannel == NULL) {
4143        ALOGE("%s: input channel obj is NULL", __func__);
4144        return NULL;
4145    }
4146
4147    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4148            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4149    if (NULL == pChannel) {
4150        ALOGE("%s: no mem for reprocess channel", __func__);
4151        return NULL;
4152    }
4153
4154    // Capture channel, only need snapshot and postview streams start together
4155    mm_camera_channel_attr_t attr;
4156    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4157    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4158    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4159    rc = pChannel->initialize();
4160    if (rc != NO_ERROR) {
4161        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4162        delete pChannel;
4163        return NULL;
4164    }
4165
4166    // pp feature config
4167    cam_pp_feature_config_t pp_config;
4168    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4169    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4170        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4171        pp_config.sharpness = mJpegSettings->sharpness;
4172    }
4173
4174    if (isWNREnabled()) {
4175        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4176        pp_config.denoise2d.denoise_enable = 1;
4177        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4178    }
4179    if (needRotationReprocess()) {
4180        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4181        int rotation = mJpegSettings->jpeg_orientation;
4182        if (rotation == 0) {
4183            pp_config.rotation = ROTATE_0;
4184        } else if (rotation == 90) {
4185            pp_config.rotation = ROTATE_90;
4186        } else if (rotation == 180) {
4187            pp_config.rotation = ROTATE_180;
4188        } else if (rotation == 270) {
4189            pp_config.rotation = ROTATE_270;
4190        }
4191    }
4192
4193   rc = pChannel->addReprocStreamsFromSource(pp_config,
4194                                             pInputChannel,
4195                                             mMetadataChannel);
4196
4197    if (rc != NO_ERROR) {
4198        delete pChannel;
4199        return NULL;
4200    }
4201    return pChannel;
4202}
4203
/* Returns the maximum number of unmatched frames allowed in the channel
 * queue, taken from the sensor capability's minimum post-proc buffer count. */
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4208
/* Whether wavelet noise reduction is supported per this camera's capability
 * table (the capability flag gates the WNR reprocess feature). */
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4212
4213}; //end namespace qcamera
4214