QCamera3HWI.cpp revision 249d8d801ab218ff4920e57fdd4ccf17e1626057
/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
/* Classic max macro; note both arguments are evaluated twice. */
#define MAX(a, b) ((a) > (b) ? (a) : (b))

/* Shortcut for fetching the buffer pointer at INDEX from a memory object. */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
/* Per-sensor capability tables, indexed by camera id (see constructor). */
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
/* NOTE(review): presumably the previously applied settings batch -- not
 * referenced in this file section, confirm against the rest of the file. */
parm_buffer_t *prevSettings;
/* Static metadata blobs, one entry per sensor. */
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

/* Serializes session open/close; only one active camera session is
 * supported at a time (enforced in openCamera()). */
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
/* Translation tables between android.control.* framework enum values and
 * the backend (cam_*) enums.  Each entry is a {framework, HAL} pair. */

/* Color effect modes. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

/* Auto-white-balance modes. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

/* Scene modes; STEADYPHOTO is expressed as the backend's ANTISHAKE mode. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

/* Autofocus modes; AF_MODE_OFF is expressed as fixed focus. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

/* AE antibanding (flicker-avoidance) modes. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

/* AE mode -> flash behavior when AE controls the flash.  Plain AE_MODE_ON
 * (no flash requested) deliberately maps to flash OFF, and the red-eye
 * variant falls back to plain AUTO flash. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

/* Manual flash modes (android.flash.mode). */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

/* Face detect modes.  NOTE(review): FACE_DETECT_MODE_SIMPLE is not mapped
 * here -- confirm whether SIMPLE is intentionally unsupported. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

/* Advertised JPEG thumbnail sizes as (width, height) pairs; the trailing
 * (0, 0) entry presumably advertises the "no thumbnail" option -- as
 * required by ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES. */
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
/* camera3_device_ops vtable handed to the camera framework; every entry
 * forwards to the matching static trampoline on QCamera3HardwareInterface.
 * (GNU-style labeled initializers keep the field/function pairing obvious.) */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Fills in the
 *              camera3_device_t shell returned to the framework and
 *              initializes request-tracking state. The camera hardware
 *              itself is only opened later, in openCamera().
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Device shell handed back to the framework from openCamera().
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): assumes gCamCapability[cameraId] was already populated
    // (e.g. during HAL module init) -- confirm, a NULL entry would crash here.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates start empty; freed in the destructor.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL is optional; used by open/closeCamera to toggle the
    // video-encode power hint.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if ((*it)->registered && (*it)->buffer_set.buffers) {
240             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
241        }
242        if (channel)
243            delete channel;
244        free (*it);
245    }
246
247    mPictureChannel = NULL;
248
249    if (mJpegSettings != NULL) {
250        free(mJpegSettings);
251        mJpegSettings = NULL;
252    }
253
254    /* Clean up all channels */
255    if (mCameraInitialized) {
256        if (mMetadataChannel) {
257            mMetadataChannel->stop();
258            delete mMetadataChannel;
259            mMetadataChannel = NULL;
260        }
261        deinitParameters();
262    }
263
264    if (mCameraOpened)
265        closeCamera();
266
267    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
268        if (mDefaultMetadata[i])
269            free_camera_metadata(mDefaultMetadata[i]);
270
271    pthread_cond_destroy(&mRequestCond);
272
273    pthread_mutex_destroy(&mMutex);
274    ALOGV("%s: X", __func__);
275}
276
277/*===========================================================================
278 * FUNCTION   : openCamera
279 *
280 * DESCRIPTION: open camera
281 *
282 * PARAMETERS :
283 *   @hw_device  : double ptr for camera device struct
284 *
285 * RETURN     : int32_t type of status
286 *              NO_ERROR  -- success
287 *              none-zero failure code
288 *==========================================================================*/
289int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
290{
291    int rc = 0;
292    pthread_mutex_lock(&mCameraSessionLock);
293    if (mCameraSessionActive) {
294        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
295        pthread_mutex_unlock(&mCameraSessionLock);
296        return -EUSERS;
297    }
298
299    if (mCameraOpened) {
300        *hw_device = NULL;
301        return PERMISSION_DENIED;
302    }
303
304    rc = openCamera();
305    if (rc == 0) {
306        *hw_device = &mCameraDevice.common;
307        mCameraSessionActive = 1;
308    } else
309        *hw_device = NULL;
310
311#ifdef HAS_MULTIMEDIA_HINTS
312    if (rc == 0) {
313        if (m_pPowerModule) {
314            if (m_pPowerModule->powerHint) {
315                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
316                        (void *)"state=1");
317            }
318        }
319    }
320#endif
321    pthread_mutex_unlock(&mCameraSessionLock);
322    return rc;
323}
324
325/*===========================================================================
326 * FUNCTION   : openCamera
327 *
328 * DESCRIPTION: open camera
329 *
330 * PARAMETERS : none
331 *
332 * RETURN     : int32_t type of status
333 *              NO_ERROR  -- success
334 *              none-zero failure code
335 *==========================================================================*/
336int QCamera3HardwareInterface::openCamera()
337{
338    if (mCameraHandle) {
339        ALOGE("Failure: Camera already opened");
340        return ALREADY_EXISTS;
341    }
342    mCameraHandle = camera_open(mCameraId);
343    if (!mCameraHandle) {
344        ALOGE("camera_open failed.");
345        return UNKNOWN_ERROR;
346    }
347
348    mCameraOpened = true;
349
350    return NO_ERROR;
351}
352
353/*===========================================================================
354 * FUNCTION   : closeCamera
355 *
356 * DESCRIPTION: close camera
357 *
358 * PARAMETERS : none
359 *
360 * RETURN     : int32_t type of status
361 *              NO_ERROR  -- success
362 *              none-zero failure code
363 *==========================================================================*/
364int QCamera3HardwareInterface::closeCamera()
365{
366    int rc = NO_ERROR;
367
368    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
369    mCameraHandle = NULL;
370    mCameraOpened = false;
371
372#ifdef HAS_MULTIMEDIA_HINTS
373    if (rc == NO_ERROR) {
374        if (m_pPowerModule) {
375            if (m_pPowerModule->powerHint) {
376                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
377                        (void *)"state=0");
378            }
379        }
380    }
381#endif
382
383    return rc;
384}
385
386/*===========================================================================
387 * FUNCTION   : initialize
388 *
389 * DESCRIPTION: Initialize frameworks callback functions
390 *
391 * PARAMETERS :
392 *   @callback_ops : callback function to frameworks
393 *
394 * RETURN     :
395 *
396 *==========================================================================*/
397int QCamera3HardwareInterface::initialize(
398        const struct camera3_callback_ops *callback_ops)
399{
400    int rc;
401
402    pthread_mutex_lock(&mMutex);
403
404    rc = initParameters();
405    if (rc < 0) {
406        ALOGE("%s: initParamters failed %d", __func__, rc);
407       goto err1;
408    }
409    mCallbackOps = callback_ops;
410
411    pthread_mutex_unlock(&mMutex);
412    mCameraInitialized = true;
413    return 0;
414
415err1:
416    pthread_mutex_unlock(&mMutex);
417    return rc;
418}
419
420/*===========================================================================
421 * FUNCTION   : configureStreams
422 *
423 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
424 *              and output streams.
425 *
426 * PARAMETERS :
427 *   @stream_list : streams to be configured
428 *
429 * RETURN     :
430 *
431 *==========================================================================*/
432int QCamera3HardwareInterface::configureStreams(
433        camera3_stream_configuration_t *streamList)
434{
435    int rc = 0;
436    mIsZslMode = false;
437
438    // Sanity check stream_list
439    if (streamList == NULL) {
440        ALOGE("%s: NULL stream configuration", __func__);
441        return BAD_VALUE;
442    }
443    if (streamList->streams == NULL) {
444        ALOGE("%s: NULL stream list", __func__);
445        return BAD_VALUE;
446    }
447
448    if (streamList->num_streams < 1) {
449        ALOGE("%s: Bad number of streams requested: %d", __func__,
450                streamList->num_streams);
451        return BAD_VALUE;
452    }
453
454    /* first invalidate all the steams in the mStreamList
455     * if they appear again, they will be validated */
456    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
457            it != mStreamInfo.end(); it++) {
458        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
459        channel->stop();
460        (*it)->status = INVALID;
461    }
462    if (mMetadataChannel) {
463        /* If content of mStreamInfo is not 0, there is metadata stream */
464        mMetadataChannel->stop();
465    }
466
467    pthread_mutex_lock(&mMutex);
468
469    camera3_stream_t *inputStream = NULL;
470    camera3_stream_t *jpegStream = NULL;
471    cam_stream_size_info_t stream_config_info;
472
473    for (size_t i = 0; i < streamList->num_streams; i++) {
474        camera3_stream_t *newStream = streamList->streams[i];
475        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
476                __func__, newStream->stream_type, newStream->format,
477                 newStream->width, newStream->height);
478        //if the stream is in the mStreamList validate it
479        bool stream_exists = false;
480        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
481                it != mStreamInfo.end(); it++) {
482            if ((*it)->stream == newStream) {
483                QCamera3Channel *channel =
484                    (QCamera3Channel*)(*it)->stream->priv;
485                stream_exists = true;
486                (*it)->status = RECONFIGURE;
487                /*delete the channel object associated with the stream because
488                  we need to reconfigure*/
489                delete channel;
490                (*it)->stream->priv = NULL;
491            }
492        }
493        if (!stream_exists) {
494            //new stream
495            stream_info_t* stream_info;
496            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
497            stream_info->stream = newStream;
498            stream_info->status = VALID;
499            stream_info->registered = 0;
500            mStreamInfo.push_back(stream_info);
501        }
502        if (newStream->stream_type == CAMERA3_STREAM_INPUT
503                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
504            if (inputStream != NULL) {
505                ALOGE("%s: Multiple input streams requested!", __func__);
506                pthread_mutex_unlock(&mMutex);
507                return BAD_VALUE;
508            }
509            inputStream = newStream;
510        }
511        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
512            jpegStream = newStream;
513        }
514    }
515    mInputStream = inputStream;
516
517    /*clean up invalid streams*/
518    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
519            it != mStreamInfo.end();) {
520        if(((*it)->status) == INVALID){
521            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
522            delete channel;
523            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
524            free(*it);
525            it = mStreamInfo.erase(it);
526        } else {
527            it++;
528        }
529    }
530    if (mMetadataChannel) {
531        delete mMetadataChannel;
532        mMetadataChannel = NULL;
533    }
534
535    //Create metadata channel and initialize it
536    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
537                    mCameraHandle->ops, captureResultCb,
538                    &gCamCapability[mCameraId]->padding_info, this);
539    if (mMetadataChannel == NULL) {
540        ALOGE("%s: failed to allocate metadata channel", __func__);
541        rc = -ENOMEM;
542        pthread_mutex_unlock(&mMutex);
543        return rc;
544    }
545    rc = mMetadataChannel->initialize();
546    if (rc < 0) {
547        ALOGE("%s: metadata channel initialization failed", __func__);
548        delete mMetadataChannel;
549        pthread_mutex_unlock(&mMutex);
550        return rc;
551    }
552
553    /* Allocate channel objects for the requested streams */
554    for (size_t i = 0; i < streamList->num_streams; i++) {
555        camera3_stream_t *newStream = streamList->streams[i];
556        uint32_t stream_usage = newStream->usage;
557        stream_config_info.stream_sizes[i].width = newStream->width;
558        stream_config_info.stream_sizes[i].height = newStream->height;
559        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
560            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
561            //for zsl stream the size is jpeg size
562            stream_config_info.stream_sizes[i].width = jpegStream->width;
563            stream_config_info.stream_sizes[i].height = jpegStream->height;
564            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
565        } else {
566           //for non zsl streams find out the format
567           switch (newStream->format) {
568           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
569              {
570                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
571                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
572                 } else {
573                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
574                 }
575              }
576              break;
577           case HAL_PIXEL_FORMAT_YCbCr_420_888:
578              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
579              break;
580           case HAL_PIXEL_FORMAT_BLOB:
581              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
582              break;
583           default:
584              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
585              break;
586           }
587        }
588        if (newStream->priv == NULL) {
589            //New stream, construct channel
590            switch (newStream->stream_type) {
591            case CAMERA3_STREAM_INPUT:
592                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
593                break;
594            case CAMERA3_STREAM_BIDIRECTIONAL:
595                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
596                    GRALLOC_USAGE_HW_CAMERA_WRITE;
597                break;
598            case CAMERA3_STREAM_OUTPUT:
599                /* For video encoding stream, set read/write rarely
600                 * flag so that they may be set to un-cached */
601                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
602                    newStream->usage =
603                         (GRALLOC_USAGE_SW_READ_RARELY |
604                         GRALLOC_USAGE_SW_WRITE_RARELY |
605                         GRALLOC_USAGE_HW_CAMERA_WRITE);
606                else
607                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
608                break;
609            default:
610                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
611                break;
612            }
613
614            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
615                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
616                QCamera3Channel *channel;
617                switch (newStream->format) {
618                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
619                case HAL_PIXEL_FORMAT_YCbCr_420_888:
620                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
621                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
622                        jpegStream) {
623                        uint32_t width = jpegStream->width;
624                        uint32_t height = jpegStream->height;
625                        mIsZslMode = true;
626                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
627                            mCameraHandle->ops, captureResultCb,
628                            &gCamCapability[mCameraId]->padding_info, this, newStream,
629                            width, height);
630                    } else
631                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
632                            mCameraHandle->ops, captureResultCb,
633                            &gCamCapability[mCameraId]->padding_info, this, newStream);
634                    if (channel == NULL) {
635                        ALOGE("%s: allocation of channel failed", __func__);
636                        pthread_mutex_unlock(&mMutex);
637                        return -ENOMEM;
638                    }
639
640                    newStream->priv = channel;
641                    break;
642                case HAL_PIXEL_FORMAT_BLOB:
643                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
644                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
645                            mCameraHandle->ops, captureResultCb,
646                            &gCamCapability[mCameraId]->padding_info, this, newStream);
647                    if (mPictureChannel == NULL) {
648                        ALOGE("%s: allocation of channel failed", __func__);
649                        pthread_mutex_unlock(&mMutex);
650                        return -ENOMEM;
651                    }
652                    newStream->priv = (QCamera3Channel*)mPictureChannel;
653                    break;
654
655                //TODO: Add support for app consumed format?
656                default:
657                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
658                    break;
659                }
660            }
661        } else {
662            // Channel already exists for this stream
663            // Do nothing for now
664        }
665    }
666
667    int32_t hal_version = CAM_HAL_V3;
668    stream_config_info.num_streams = streamList->num_streams;
669
670    // settings/parameters don't carry over for new configureStreams
671    memset(mParameters, 0, sizeof(parm_buffer_t));
672
673    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
674    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
675                sizeof(hal_version), &hal_version);
676
677    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
678                sizeof(stream_config_info), &stream_config_info);
679
680    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
681
682    /*For the streams to be reconfigured we need to register the buffers
683      since the framework wont*/
684    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
685            it != mStreamInfo.end(); it++) {
686        if ((*it)->status == RECONFIGURE) {
687            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
688            /*only register buffers for streams that have already been
689              registered*/
690            if ((*it)->registered) {
691                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
692                        (*it)->buffer_set.buffers);
693                if (rc != NO_ERROR) {
694                    ALOGE("%s: Failed to register the buffers of old stream,\
695                            rc = %d", __func__, rc);
696                }
697                ALOGV("%s: channel %p has %d buffers",
698                        __func__, channel, (*it)->buffer_set.num_buffers);
699            }
700        }
701
702        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
703        if (index == NAME_NOT_FOUND) {
704            mPendingBuffersMap.add((*it)->stream, 0);
705        } else {
706            mPendingBuffersMap.editValueAt(index) = 0;
707        }
708    }
709
710    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
711    mPendingRequestsList.clear();
712
713    /*flush the metadata list*/
714    if (!mStoredMetadataList.empty()) {
715        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
716              m != mStoredMetadataList.end(); m++) {
717            mMetadataChannel->bufDone(m->meta_buf);
718            free(m->meta_buf);
719            m = mStoredMetadataList.erase(m);
720        }
721    }
722
723    mFirstRequest = true;
724
725    //Get min frame duration for this streams configuration
726    deriveMinFrameDuration();
727
728    pthread_mutex_unlock(&mMutex);
729    return rc;
730}
731
732/*===========================================================================
733 * FUNCTION   : validateCaptureRequest
734 *
735 * DESCRIPTION: validate a capture request from camera service
736 *
737 * PARAMETERS :
738 *   @request : request from framework to process
739 *
740 * RETURN     :
741 *
742 *==========================================================================*/
743int QCamera3HardwareInterface::validateCaptureRequest(
744                    camera3_capture_request_t *request)
745{
746    ssize_t idx = 0;
747    const camera3_stream_buffer_t *b;
748    CameraMetadata meta;
749
750    /* Sanity check the request */
751    if (request == NULL) {
752        ALOGE("%s: NULL capture request", __func__);
753        return BAD_VALUE;
754    }
755
756    uint32_t frameNumber = request->frame_number;
757    if (request->input_buffer != NULL &&
758            request->input_buffer->stream != mInputStream) {
759        ALOGE("%s: Request %d: Input buffer not from input stream!",
760                __FUNCTION__, frameNumber);
761        return BAD_VALUE;
762    }
763    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
764        ALOGE("%s: Request %d: No output buffers provided!",
765                __FUNCTION__, frameNumber);
766        return BAD_VALUE;
767    }
768    if (request->input_buffer != NULL) {
769        b = request->input_buffer;
770        QCamera3Channel *channel =
771            static_cast<QCamera3Channel*>(b->stream->priv);
772        if (channel == NULL) {
773            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
774                    __func__, frameNumber, idx);
775            return BAD_VALUE;
776        }
777        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
778            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
779                    __func__, frameNumber, idx);
780            return BAD_VALUE;
781        }
782        if (b->release_fence != -1) {
783            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
784                    __func__, frameNumber, idx);
785            return BAD_VALUE;
786        }
787        if (b->buffer == NULL) {
788            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
789                    __func__, frameNumber, idx);
790            return BAD_VALUE;
791        }
792    }
793
794    // Validate all buffers
795    b = request->output_buffers;
796    do {
797        QCamera3Channel *channel =
798                static_cast<QCamera3Channel*>(b->stream->priv);
799        if (channel == NULL) {
800            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
801                    __func__, frameNumber, idx);
802            return BAD_VALUE;
803        }
804        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
805            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
806                    __func__, frameNumber, idx);
807            return BAD_VALUE;
808        }
809        if (b->release_fence != -1) {
810            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
811                    __func__, frameNumber, idx);
812            return BAD_VALUE;
813        }
814        if (b->buffer == NULL) {
815            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
816                    __func__, frameNumber, idx);
817            return BAD_VALUE;
818        }
819        idx++;
820        b = request->output_buffers + idx;
821    } while (idx < (ssize_t)request->num_output_buffers);
822
823    return NO_ERROR;
824}
825
826/*===========================================================================
827 * FUNCTION   : deriveMinFrameDuration
828 *
829 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
830 *              on currently configured streams.
831 *
832 * PARAMETERS : NONE
833 *
834 * RETURN     : NONE
835 *
836 *==========================================================================*/
837void QCamera3HardwareInterface::deriveMinFrameDuration()
838{
839    int32_t maxJpegDimension, maxProcessedDimension;
840
841    maxJpegDimension = 0;
842    maxProcessedDimension = 0;
843
844    // Figure out maximum jpeg, processed, and raw dimensions
845    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
846        it != mStreamInfo.end(); it++) {
847
848        // Input stream doesn't have valid stream_type
849        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
850            continue;
851
852        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
853        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
854            if (dimension > maxJpegDimension)
855                maxJpegDimension = dimension;
856        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
857            if (dimension > maxProcessedDimension)
858                maxProcessedDimension = dimension;
859        }
860    }
861
862    //Assume all jpeg dimensions are in processed dimensions.
863    if (maxJpegDimension > maxProcessedDimension)
864        maxProcessedDimension = maxJpegDimension;
865
866    //Find minimum durations for processed, jpeg, and raw
867    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
868    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
869        if (maxProcessedDimension ==
870            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
871            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
872            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
873            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
874            break;
875        }
876    }
877}
878
879/*===========================================================================
880 * FUNCTION   : getMinFrameDuration
881 *
882 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
883 *              and current request configuration.
884 *
885 * PARAMETERS : @request: requset sent by the frameworks
886 *
887 * RETURN     : min farme duration for a particular request
888 *
889 *==========================================================================*/
890int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
891{
892    bool hasJpegStream = false;
893    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
894        const camera3_stream_t *stream = request->output_buffers[i].stream;
895        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
896            hasJpegStream = true;
897    }
898
899    if (!hasJpegStream)
900        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
901    else
902        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
903}
904
905/*===========================================================================
906 * FUNCTION   : registerStreamBuffers
907 *
908 * DESCRIPTION: Register buffers for a given stream with the HAL device.
909 *
910 * PARAMETERS :
911 *   @stream_list : streams to be configured
912 *
913 * RETURN     :
914 *
915 *==========================================================================*/
916int QCamera3HardwareInterface::registerStreamBuffers(
917        const camera3_stream_buffer_set_t *buffer_set)
918{
919    int rc = 0;
920
921    pthread_mutex_lock(&mMutex);
922
923    if (buffer_set == NULL) {
924        ALOGE("%s: Invalid buffer_set parameter.", __func__);
925        pthread_mutex_unlock(&mMutex);
926        return -EINVAL;
927    }
928    if (buffer_set->stream == NULL) {
929        ALOGE("%s: Invalid stream parameter.", __func__);
930        pthread_mutex_unlock(&mMutex);
931        return -EINVAL;
932    }
933    if (buffer_set->num_buffers < 1) {
934        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
935        pthread_mutex_unlock(&mMutex);
936        return -EINVAL;
937    }
938    if (buffer_set->buffers == NULL) {
939        ALOGE("%s: Invalid buffers parameter.", __func__);
940        pthread_mutex_unlock(&mMutex);
941        return -EINVAL;
942    }
943
944    camera3_stream_t *stream = buffer_set->stream;
945    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
946
947    //set the buffer_set in the mStreamInfo array
948    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
949            it != mStreamInfo.end(); it++) {
950        if ((*it)->stream == stream) {
951            uint32_t numBuffers = buffer_set->num_buffers;
952            (*it)->buffer_set.stream = buffer_set->stream;
953            (*it)->buffer_set.num_buffers = numBuffers;
954            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
955            if ((*it)->buffer_set.buffers == NULL) {
956                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
957                pthread_mutex_unlock(&mMutex);
958                return -ENOMEM;
959            }
960            for (size_t j = 0; j < numBuffers; j++){
961                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
962            }
963            (*it)->registered = 1;
964        }
965    }
966    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
967    if (rc < 0) {
968        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
969        pthread_mutex_unlock(&mMutex);
970        return -ENODEV;
971    }
972
973    pthread_mutex_unlock(&mMutex);
974    return NO_ERROR;
975}
976
977/*===========================================================================
978 * FUNCTION   : processCaptureRequest
979 *
980 * DESCRIPTION: process a capture request from camera service
981 *
982 * PARAMETERS :
983 *   @request : request from framework to process
984 *
985 * RETURN     :
986 *
987 *==========================================================================*/
988int QCamera3HardwareInterface::processCaptureRequest(
989                    camera3_capture_request_t *request)
990{
991    int rc = NO_ERROR;
992    int32_t request_id;
993    CameraMetadata meta;
994    MetadataBufferInfo reproc_meta;
995    int queueMetadata = 0;
996
997    pthread_mutex_lock(&mMutex);
998
999    rc = validateCaptureRequest(request);
1000    if (rc != NO_ERROR) {
1001        ALOGE("%s: incoming request is not valid", __func__);
1002        pthread_mutex_unlock(&mMutex);
1003        return rc;
1004    }
1005
1006    meta = request->settings;
1007
1008    // For first capture request, send capture intent, and
1009    // stream on all streams
1010    if (mFirstRequest) {
1011
1012        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1013            int32_t hal_version = CAM_HAL_V3;
1014            uint8_t captureIntent =
1015                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1016
1017            memset(mParameters, 0, sizeof(parm_buffer_t));
1018            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1019            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1020                sizeof(hal_version), &hal_version);
1021            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1022                sizeof(captureIntent), &captureIntent);
1023            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1024                mParameters);
1025        }
1026
1027        mMetadataChannel->start();
1028        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1029            it != mStreamInfo.end(); it++) {
1030            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1031            channel->start();
1032        }
1033    }
1034
1035    uint32_t frameNumber = request->frame_number;
1036    uint32_t streamTypeMask = 0;
1037
1038    if (meta.exists(ANDROID_REQUEST_ID)) {
1039        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1040        mCurrentRequestId = request_id;
1041        ALOGV("%s: Received request with id: %d",__func__, request_id);
1042    } else if (mFirstRequest || mCurrentRequestId == -1){
1043        ALOGE("%s: Unable to find request id field, \
1044                & no previous id available", __func__);
1045        return NAME_NOT_FOUND;
1046    } else {
1047        ALOGV("%s: Re-using old request id", __func__);
1048        request_id = mCurrentRequestId;
1049    }
1050
1051    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1052                                    __func__, __LINE__,
1053                                    request->num_output_buffers,
1054                                    request->input_buffer,
1055                                    frameNumber);
1056    // Acquire all request buffers first
1057    int blob_request = 0;
1058    for (size_t i = 0; i < request->num_output_buffers; i++) {
1059        const camera3_stream_buffer_t& output = request->output_buffers[i];
1060        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1061        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1062
1063        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1064        //Call function to store local copy of jpeg data for encode params.
1065            blob_request = 1;
1066            rc = getJpegSettings(request->settings);
1067            if (rc < 0) {
1068                ALOGE("%s: failed to get jpeg parameters", __func__);
1069                pthread_mutex_unlock(&mMutex);
1070                return rc;
1071            }
1072        }
1073
1074        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1075        if (rc != OK) {
1076            ALOGE("%s: fence wait failed %d", __func__, rc);
1077            pthread_mutex_unlock(&mMutex);
1078            return rc;
1079        }
1080        streamTypeMask |= channel->getStreamTypeMask();
1081    }
1082
1083    rc = setFrameParameters(request, streamTypeMask);
1084    if (rc < 0) {
1085        ALOGE("%s: fail to set frame parameters", __func__);
1086        pthread_mutex_unlock(&mMutex);
1087        return rc;
1088    }
1089
1090    /* Update pending request list and pending buffers map */
1091    PendingRequestInfo pendingRequest;
1092    pendingRequest.frame_number = frameNumber;
1093    pendingRequest.num_buffers = request->num_output_buffers;
1094    pendingRequest.request_id = request_id;
1095    pendingRequest.blob_request = blob_request;
1096    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1097
1098    for (size_t i = 0; i < request->num_output_buffers; i++) {
1099        RequestedBufferInfo requestedBuf;
1100        requestedBuf.stream = request->output_buffers[i].stream;
1101        requestedBuf.buffer = NULL;
1102        pendingRequest.buffers.push_back(requestedBuf);
1103
1104        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1105    }
1106    mPendingRequestsList.push_back(pendingRequest);
1107
1108    // Notify metadata channel we receive a request
1109    mMetadataChannel->request(NULL, frameNumber);
1110
1111    // Call request on other streams
1112    for (size_t i = 0; i < request->num_output_buffers; i++) {
1113        const camera3_stream_buffer_t& output = request->output_buffers[i];
1114        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1115        mm_camera_buf_def_t *pInputBuffer = NULL;
1116
1117        if (channel == NULL) {
1118            ALOGE("%s: invalid channel pointer for stream", __func__);
1119            continue;
1120        }
1121
1122        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1123            QCamera3RegularChannel* inputChannel = NULL;
1124            if(request->input_buffer != NULL){
1125                //Try to get the internal format
1126                inputChannel = (QCamera3RegularChannel*)
1127                    request->input_buffer->stream->priv;
1128                if(inputChannel == NULL ){
1129                    ALOGE("%s: failed to get input channel handle", __func__);
1130                } else {
1131                    pInputBuffer =
1132                        inputChannel->getInternalFormatBuffer(
1133                                request->input_buffer->buffer);
1134                    ALOGD("%s: Input buffer dump",__func__);
1135                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1136                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1137                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1138                    ALOGD("Handle:%p", request->input_buffer->buffer);
1139                    //TODO: need to get corresponding metadata and send it to pproc
1140                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1141                         m != mStoredMetadataList.end(); m++) {
1142                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1143                            reproc_meta.meta_buf = m->meta_buf;
1144                            queueMetadata = 1;
1145                            break;
1146                        }
1147                    }
1148                }
1149            }
1150            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1151                            pInputBuffer,(QCamera3Channel*)inputChannel);
1152            if (queueMetadata) {
1153                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1154            }
1155        } else {
1156            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1157                __LINE__, output.buffer, frameNumber);
1158            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1159                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1160                     m != mStoredMetadataList.end(); m++) {
1161                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1162                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1163                            mMetadataChannel->bufDone(m->meta_buf);
1164                            free(m->meta_buf);
1165                            m = mStoredMetadataList.erase(m);
1166                            break;
1167                        }
1168                   }
1169                }
1170            }
1171            rc = channel->request(output.buffer, frameNumber);
1172        }
1173        if (rc < 0)
1174            ALOGE("%s: request failed", __func__);
1175    }
1176
1177    mFirstRequest = false;
1178    // Added a timed condition wait
1179    struct timespec ts;
1180    uint8_t isValidTimeout = 1;
1181    rc = clock_gettime(CLOCK_REALTIME, &ts);
1182    if (rc < 0) {
1183        isValidTimeout = 0;
1184        ALOGE("%s: Error reading the real time clock!!", __func__);
1185    }
1186    else {
1187        // Make timeout as 5 sec for request to be honored
1188        ts.tv_sec += 5;
1189    }
1190    //Block on conditional variable
1191    mPendingRequest = 1;
1192    while (mPendingRequest == 1) {
1193        if (!isValidTimeout) {
1194            ALOGV("%s: Blocking on conditional wait", __func__);
1195            pthread_cond_wait(&mRequestCond, &mMutex);
1196        }
1197        else {
1198            ALOGV("%s: Blocking on timed conditional wait", __func__);
1199            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1200            if (rc == ETIMEDOUT) {
1201                rc = -ENODEV;
1202                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1203                break;
1204            }
1205        }
1206        ALOGV("%s: Unblocked", __func__);
1207    }
1208
1209    pthread_mutex_unlock(&mMutex);
1210    return rc;
1211}
1212
1213/*===========================================================================
1214 * FUNCTION   : getMetadataVendorTagOps
1215 *
1216 * DESCRIPTION:
1217 *
1218 * PARAMETERS :
1219 *
1220 *
1221 * RETURN     :
1222 *==========================================================================*/
1223void QCamera3HardwareInterface::getMetadataVendorTagOps(
1224                    vendor_tag_query_ops_t* /*ops*/)
1225{
1226    /* Enable locks when we eventually add Vendor Tags */
1227    /*
1228    pthread_mutex_lock(&mMutex);
1229
1230    pthread_mutex_unlock(&mMutex);
1231    */
1232    return;
1233}
1234
1235/*===========================================================================
1236 * FUNCTION   : dump
1237 *
1238 * DESCRIPTION:
1239 *
1240 * PARAMETERS :
1241 *
1242 *
1243 * RETURN     :
1244 *==========================================================================*/
1245void QCamera3HardwareInterface::dump(int /*fd*/)
1246{
1247    /*Enable lock when we implement this function*/
1248    /*
1249    pthread_mutex_lock(&mMutex);
1250
1251    pthread_mutex_unlock(&mMutex);
1252    */
1253    return;
1254}
1255
1256/*===========================================================================
1257 * FUNCTION   : flush
1258 *
1259 * DESCRIPTION:
1260 *
1261 * PARAMETERS :
1262 *
1263 *
1264 * RETURN     :
1265 *==========================================================================*/
1266int QCamera3HardwareInterface::flush()
1267{
1268    /*Enable lock when we implement this function*/
1269    /*
1270    pthread_mutex_lock(&mMutex);
1271
1272    pthread_mutex_unlock(&mMutex);
1273    */
1274    return 0;
1275}
1276
1277/*===========================================================================
1278 * FUNCTION   : captureResultCb
1279 *
1280 * DESCRIPTION: Callback handler for all capture result
1281 *              (streams, as well as metadata)
1282 *
1283 * PARAMETERS :
1284 *   @metadata : metadata information
1285 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1286 *               NULL if metadata.
1287 *
1288 * RETURN     : NONE
1289 *==========================================================================*/
1290void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1291                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1292{
1293    pthread_mutex_lock(&mMutex);
1294
1295    if (metadata_buf) {
1296        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1297        int32_t frame_number_valid = *(int32_t *)
1298            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1299        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1300            CAM_INTF_META_PENDING_REQUESTS, metadata);
1301        uint32_t frame_number = *(uint32_t *)
1302            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1303        const struct timeval *tv = (const struct timeval *)
1304            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1305        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1306            tv->tv_usec * NSEC_PER_USEC;
1307
1308        if (!frame_number_valid) {
1309            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1310            mMetadataChannel->bufDone(metadata_buf);
1311            free(metadata_buf);
1312            goto done_metadata;
1313        }
1314        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1315                frame_number, capture_time);
1316
1317        // Go through the pending requests info and send shutter/results to frameworks
1318        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1319                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1320            camera3_capture_result_t result;
1321            camera3_notify_msg_t notify_msg;
1322            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1323
1324            // Flush out all entries with less or equal frame numbers.
1325
1326            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1327            //Right now it's the same as metadata timestamp
1328
1329            //TODO: When there is metadata drop, how do we derive the timestamp of
1330            //dropped frames? For now, we fake the dropped timestamp by substracting
1331            //from the reported timestamp
1332            nsecs_t current_capture_time = capture_time -
1333                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1334
1335            // Send shutter notify to frameworks
1336            notify_msg.type = CAMERA3_MSG_SHUTTER;
1337            notify_msg.message.shutter.frame_number = i->frame_number;
1338            notify_msg.message.shutter.timestamp = current_capture_time;
1339            mCallbackOps->notify(mCallbackOps, &notify_msg);
1340            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1341                    i->frame_number, capture_time);
1342
1343            // Send empty metadata with already filled buffers for dropped metadata
1344            // and send valid metadata with already filled buffers for current metadata
1345            if (i->frame_number < frame_number) {
1346                CameraMetadata dummyMetadata;
1347                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1348                        &current_capture_time, 1);
1349                dummyMetadata.update(ANDROID_REQUEST_ID,
1350                        &(i->request_id), 1);
1351                result.result = dummyMetadata.release();
1352            } else {
1353                result.result = translateCbMetadataToResultMetadata(metadata,
1354                        current_capture_time, i->request_id);
1355                if (mIsZslMode) {
1356                   int found_metadata = 0;
1357                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1358                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1359                        j != i->buffers.end(); j++) {
1360                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1361                         //check if corresp. zsl already exists in the stored metadata list
1362                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1363                               m != mStoredMetadataList.begin(); m++) {
1364                            if (m->frame_number == frame_number) {
1365                               m->meta_buf = metadata_buf;
1366                               found_metadata = 1;
1367                               break;
1368                            }
1369                         }
1370                         if (!found_metadata) {
1371                            MetadataBufferInfo store_meta_info;
1372                            store_meta_info.meta_buf = metadata_buf;
1373                            store_meta_info.frame_number = frame_number;
1374                            mStoredMetadataList.push_back(store_meta_info);
1375                            found_metadata = 1;
1376                         }
1377                      }
1378                   }
1379                   if (!found_metadata) {
1380                       if (!i->input_buffer_present && i->blob_request) {
1381                          //livesnapshot or fallback non-zsl snapshot case
1382                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1383                                j != i->buffers.end(); j++){
1384                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1385                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1386                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1387                                 break;
1388                              }
1389                         }
1390                       } else {
1391                            //return the metadata immediately
1392                            mMetadataChannel->bufDone(metadata_buf);
1393                            free(metadata_buf);
1394                       }
1395                   }
1396               } else if (!mIsZslMode && i->blob_request) {
1397                   //If it is a blob request then send the metadata to the picture channel
1398                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1399               } else {
1400                   // Return metadata buffer
1401                   mMetadataChannel->bufDone(metadata_buf);
1402                   free(metadata_buf);
1403               }
1404
1405            }
1406            if (!result.result) {
1407                ALOGE("%s: metadata is NULL", __func__);
1408            }
1409            result.frame_number = i->frame_number;
1410            result.num_output_buffers = 0;
1411            result.output_buffers = NULL;
1412            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1413                    j != i->buffers.end(); j++) {
1414                if (j->buffer) {
1415                    result.num_output_buffers++;
1416                }
1417            }
1418
1419            if (result.num_output_buffers > 0) {
1420                camera3_stream_buffer_t *result_buffers =
1421                    new camera3_stream_buffer_t[result.num_output_buffers];
1422                if (!result_buffers) {
1423                    ALOGE("%s: Fatal error: out of memory", __func__);
1424                }
1425                size_t result_buffers_idx = 0;
1426                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1427                        j != i->buffers.end(); j++) {
1428                    if (j->buffer) {
1429                        result_buffers[result_buffers_idx++] = *(j->buffer);
1430                        free(j->buffer);
1431                        j->buffer = NULL;
1432                        mPendingBuffersMap.editValueFor(j->stream)--;
1433                    }
1434                }
1435                result.output_buffers = result_buffers;
1436
1437                mCallbackOps->process_capture_result(mCallbackOps, &result);
1438                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1439                        __func__, result.frame_number, current_capture_time);
1440                free_camera_metadata((camera_metadata_t *)result.result);
1441                delete[] result_buffers;
1442            } else {
1443                mCallbackOps->process_capture_result(mCallbackOps, &result);
1444                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1445                        __func__, result.frame_number, current_capture_time);
1446                free_camera_metadata((camera_metadata_t *)result.result);
1447            }
1448            // erase the element from the list
1449            i = mPendingRequestsList.erase(i);
1450        }
1451
1452
1453done_metadata:
1454        bool max_buffers_dequeued = false;
1455        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1456            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1457            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1458            if (queued_buffers == stream->max_buffers) {
1459                max_buffers_dequeued = true;
1460                break;
1461            }
1462        }
1463        if (!max_buffers_dequeued && !pending_requests) {
1464            // Unblock process_capture_request
1465            mPendingRequest = 0;
1466            pthread_cond_signal(&mRequestCond);
1467        }
1468    } else {
1469        // If the frame number doesn't exist in the pending request list,
1470        // directly send the buffer to the frameworks, and update pending buffers map
1471        // Otherwise, book-keep the buffer.
1472        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1473        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1474            i++;
1475        }
1476        if (i == mPendingRequestsList.end()) {
1477            // Verify all pending requests frame_numbers are greater
1478            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1479                    j != mPendingRequestsList.end(); j++) {
1480                if (j->frame_number < frame_number) {
1481                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1482                            __func__, j->frame_number, frame_number);
1483                }
1484            }
1485            camera3_capture_result_t result;
1486            result.result = NULL;
1487            result.frame_number = frame_number;
1488            result.num_output_buffers = 1;
1489            result.output_buffers = buffer;
1490            ALOGV("%s: result frame_number = %d, buffer = %p",
1491                    __func__, frame_number, buffer);
1492            mPendingBuffersMap.editValueFor(buffer->stream)--;
1493            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1494                int found = 0;
1495                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1496                      k != mStoredMetadataList.end(); k++) {
1497                    if (k->frame_number == frame_number) {
1498                        k->zsl_buf_hdl = buffer->buffer;
1499                        found = 1;
1500                        break;
1501                    }
1502                }
1503                if (!found) {
1504                   MetadataBufferInfo meta_info;
1505                   meta_info.frame_number = frame_number;
1506                   meta_info.zsl_buf_hdl = buffer->buffer;
1507                   mStoredMetadataList.push_back(meta_info);
1508                }
1509            }
1510            mCallbackOps->process_capture_result(mCallbackOps, &result);
1511        } else {
1512            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1513                    j != i->buffers.end(); j++) {
1514                if (j->stream == buffer->stream) {
1515                    if (j->buffer != NULL) {
1516                        ALOGE("%s: Error: buffer is already set", __func__);
1517                    } else {
1518                        j->buffer = (camera3_stream_buffer_t *)malloc(
1519                                sizeof(camera3_stream_buffer_t));
1520                        *(j->buffer) = *buffer;
1521                        ALOGV("%s: cache buffer %p at result frame_number %d",
1522                                __func__, buffer, frame_number);
1523                    }
1524                }
1525            }
1526        }
1527    }
1528    pthread_mutex_unlock(&mMutex);
1529    return;
1530}
1531
1532/*===========================================================================
1533 * FUNCTION   : translateCbMetadataToResultMetadata
1534 *
1535 * DESCRIPTION:
1536 *
1537 * PARAMETERS :
1538 *   @metadata : metadata information from callback
1539 *
1540 * RETURN     : camera_metadata_t*
1541 *              metadata in a format specified by fwk
1542 *==========================================================================*/
1543camera_metadata_t*
1544QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1545                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1546                                 int32_t request_id)
1547{
1548    CameraMetadata camMetadata;
1549    camera_metadata_t* resultMetadata;
1550
1551    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1552    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1553
1554    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1555    uint8_t next_entry;
1556    while (curr_entry != CAM_INTF_PARM_MAX) {
1557       ALOGV("%s: META_DEBUG: cur_entry is %d", __func__, curr_entry);
1558       switch (curr_entry) {
1559         case CAM_INTF_META_FACE_DETECTION:{
1560             cam_face_detection_data_t *faceDetectionInfo =
1561                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1562             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1563             int32_t faceIds[numFaces];
1564             uint8_t faceScores[numFaces];
1565             int32_t faceRectangles[numFaces * 4];
1566             int32_t faceLandmarks[numFaces * 6];
1567             int j = 0, k = 0;
1568             for (int i = 0; i < numFaces; i++) {
1569                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1570                 faceScores[i] = faceDetectionInfo->faces[i].score;
1571                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1572                         faceRectangles+j, -1);
1573                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1574                 j+= 4;
1575                 k+= 6;
1576             }
1577             if (numFaces > 0) {
1578                 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1579                 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1580                 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1581                     faceRectangles, numFaces*4);
1582                 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1583                     faceLandmarks, numFaces*6);
1584             }
1585            break;
1586            }
1587         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1588             uint8_t  *color_correct_mode =
1589                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1590             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1591             break;
1592          }
1593         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1594             int32_t  *ae_precapture_id =
1595                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1596             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1597             break;
1598          }
1599         case CAM_INTF_META_AEC_ROI: {
1600            cam_area_t  *hAeRegions =
1601                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1602             int32_t aeRegions[5];
1603             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1604             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1605             break;
1606          }
1607          case CAM_INTF_META_AEC_STATE:{
1608             uint8_t *ae_state =
1609                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1610             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1611             break;
1612          }
1613          case CAM_INTF_PARM_FOCUS_MODE:{
1614             uint8_t  *focusMode =
1615                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1616             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1617                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1618             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1619             break;
1620          }
1621          case CAM_INTF_META_AF_ROI:{
1622             /*af regions*/
1623             cam_area_t  *hAfRegions =
1624                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1625             int32_t afRegions[5];
1626             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1627             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1628             break;
1629          }
1630          case CAM_INTF_META_AF_STATE: {
1631             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1632             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1633             break;
1634          }
1635          case CAM_INTF_META_AF_TRIGGER_ID: {
1636             int32_t  *afTriggerId =
1637                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1638             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1639             break;
1640          }
1641          case CAM_INTF_PARM_WHITE_BALANCE: {
1642               uint8_t  *whiteBalance =
1643                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1644               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1645                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1646                   *whiteBalance);
1647               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1648               break;
1649          }
1650          case CAM_INTF_META_AWB_REGIONS: {
1651             /*awb regions*/
1652             cam_area_t  *hAwbRegions =
1653                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1654             int32_t awbRegions[5];
1655             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1656             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1657             break;
1658          }
1659          case CAM_INTF_META_AWB_STATE: {
1660             uint8_t  *whiteBalanceState =
1661                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1662             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1663             break;
1664          }
1665          case CAM_INTF_META_MODE: {
1666             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1667             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1668             break;
1669          }
1670          case CAM_INTF_META_EDGE_MODE: {
1671             uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1672             camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1673             break;
1674          }
1675          case CAM_INTF_META_FLASH_POWER: {
1676             uint8_t  *flashPower =
1677                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1678             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1679             break;
1680          }
1681          case CAM_INTF_META_FLASH_FIRING_TIME: {
1682             int64_t  *flashFiringTime =
1683                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1684             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1685             break;
1686          }
1687          case CAM_INTF_META_FLASH_STATE: {
1688             uint8_t  *flashState =
1689                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1690             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1691             break;
1692          }
1693          case CAM_INTF_META_FLASH_MODE:{
1694             uint8_t *flashMode = (uint8_t*)
1695                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
1696             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
1697             break;
1698          }
1699          case CAM_INTF_META_HOTPIXEL_MODE: {
1700              uint8_t  *hotPixelMode =
1701                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1702              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1703              break;
1704          }
1705          case CAM_INTF_META_LENS_APERTURE:{
1706             float  *lensAperture =
1707                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1708             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1709             break;
1710          }
1711          case CAM_INTF_META_LENS_FILTERDENSITY: {
1712             float  *filterDensity =
1713                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1714             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1715             break;
1716          }
1717          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1718             float  *focalLength =
1719                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1720             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1721             break;
1722          }
1723          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1724             float  *focusDistance =
1725                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1726             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1727             break;
1728          }
1729          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1730             float  *focusRange =
1731                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1732             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
1733          }
1734          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1735             uint8_t  *opticalStab =
1736                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1737             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1738          }
1739          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1740             uint8_t  *noiseRedMode =
1741                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1742             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1743             break;
1744          }
1745          case CAM_INTF_META_SCALER_CROP_REGION: {
1746             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1747             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1748             int32_t scalerCropRegion[4];
1749             scalerCropRegion[0] = hScalerCropRegion->left;
1750             scalerCropRegion[1] = hScalerCropRegion->top;
1751             scalerCropRegion[2] = hScalerCropRegion->width;
1752             scalerCropRegion[3] = hScalerCropRegion->height;
1753             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1754             break;
1755          }
1756          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1757             int64_t  *sensorExpTime =
1758                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1759             mMetadataResponse.exposure_time = *sensorExpTime;
1760             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1761             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1762             break;
1763          }
1764          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1765             int64_t  *sensorFameDuration =
1766                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1767             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1768             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1769             break;
1770          }
1771          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1772             int32_t  *sensorSensitivity =
1773                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1774             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1775             mMetadataResponse.iso_speed = *sensorSensitivity;
1776             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1777             break;
1778          }
1779          case CAM_INTF_META_SHADING_MODE: {
1780             uint8_t  *shadingMode =
1781                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1782             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1783             break;
1784          }
1785          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1786             uint8_t  *faceDetectMode =
1787                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1788             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1789                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1790                                                        *faceDetectMode);
1791             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1792             break;
1793          }
1794          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1795             uint8_t  *histogramMode =
1796                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1797             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1798             break;
1799          }
1800          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1801               uint8_t  *sharpnessMapMode =
1802                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1803               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1804                                  sharpnessMapMode, 1);
1805               break;
1806           }
1807          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1808               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1809               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1810               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1811                                  (int32_t*)sharpnessMap->sharpness,
1812                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1813               break;
1814          }
1815          case CAM_INTF_META_LENS_SHADING_MAP: {
1816               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1817               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1818               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1819               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1820               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1821                                  (float*)lensShadingMap->lens_shading,
1822                                  4*map_width*map_height);
1823               break;
1824          }
1825          case CAM_INTF_META_TONEMAP_CURVES:{
1826             //Populate CAM_INTF_META_TONEMAP_CURVES
1827             /* ch0 = G, ch 1 = B, ch 2 = R*/
1828             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1829             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1830             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1831                                (float*)tonemap->curves[0].tonemap_points,
1832                                tonemap->tonemap_points_cnt * 2);
1833
1834             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1835                                (float*)tonemap->curves[1].tonemap_points,
1836                                tonemap->tonemap_points_cnt * 2);
1837
1838             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1839                                (float*)tonemap->curves[2].tonemap_points,
1840                                tonemap->tonemap_points_cnt * 2);
1841             break;
1842          }
1843          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1844             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1845             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1846             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1847             break;
1848          }
1849          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1850              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1851              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1852              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1853                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1854              break;
1855          }
1856          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1857             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1858             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1859             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1860                       predColorCorrectionGains->gains, 4);
1861             break;
1862          }
1863          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1864             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1865                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1866             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1867                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1868             break;
1869
1870          }
1871          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
1872             uint8_t *blackLevelLock = (uint8_t*)
1873               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1874             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1875             break;
1876          }
1877          case CAM_INTF_META_SCENE_FLICKER:{
1878             uint8_t *sceneFlicker = (uint8_t*)
1879             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1880             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1881             break;
1882          }
1883          case CAM_INTF_PARM_LED_MODE:
1884             break;
1885          default:
1886             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
1887                   __func__, curr_entry);
1888             break;
1889       }
1890       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
1891       curr_entry = next_entry;
1892    }
1893    resultMetadata = camMetadata.release();
1894    return resultMetadata;
1895}
1896
1897/*===========================================================================
1898 * FUNCTION   : convertToRegions
1899 *
1900 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1901 *
1902 * PARAMETERS :
1903 *   @rect   : cam_rect_t struct to convert
1904 *   @region : int32_t destination array
1905 *   @weight : if we are converting from cam_area_t, weight is valid
1906 *             else weight = -1
1907 *
1908 *==========================================================================*/
1909void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1910    region[0] = rect.left;
1911    region[1] = rect.top;
1912    region[2] = rect.left + rect.width;
1913    region[3] = rect.top + rect.height;
1914    if (weight > -1) {
1915        region[4] = weight;
1916    }
1917}
1918
1919/*===========================================================================
1920 * FUNCTION   : convertFromRegions
1921 *
1922 * DESCRIPTION: helper method to convert from array to cam_rect_t
1923 *
1924 * PARAMETERS :
1925 *   @rect   : cam_rect_t struct to convert
1926 *   @region : int32_t destination array
1927 *   @weight : if we are converting from cam_area_t, weight is valid
1928 *             else weight = -1
1929 *
1930 *==========================================================================*/
1931void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1932                                                   const camera_metadata_t *settings,
1933                                                   uint32_t tag){
1934    CameraMetadata frame_settings;
1935    frame_settings = settings;
1936    int32_t x_min = frame_settings.find(tag).data.i32[0];
1937    int32_t y_min = frame_settings.find(tag).data.i32[1];
1938    int32_t x_max = frame_settings.find(tag).data.i32[2];
1939    int32_t y_max = frame_settings.find(tag).data.i32[3];
1940    roi->weight = frame_settings.find(tag).data.i32[4];
1941    roi->rect.left = x_min;
1942    roi->rect.top = y_min;
1943    roi->rect.width = x_max - x_min;
1944    roi->rect.height = y_max - y_min;
1945}
1946
1947/*===========================================================================
1948 * FUNCTION   : resetIfNeededROI
1949 *
1950 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1951 *              crop region
1952 *
1953 * PARAMETERS :
1954 *   @roi       : cam_area_t struct to resize
1955 *   @scalerCropRegion : cam_crop_region_t region to compare against
1956 *
1957 *
1958 *==========================================================================*/
1959bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1960                                                 const cam_crop_region_t* scalerCropRegion)
1961{
1962    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1963    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1964    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1965    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1966    if ((roi_x_max < scalerCropRegion->left) ||
1967        (roi_y_max < scalerCropRegion->top)  ||
1968        (roi->rect.left > crop_x_max) ||
1969        (roi->rect.top > crop_y_max)){
1970        return false;
1971    }
1972    if (roi->rect.left < scalerCropRegion->left) {
1973        roi->rect.left = scalerCropRegion->left;
1974    }
1975    if (roi->rect.top < scalerCropRegion->top) {
1976        roi->rect.top = scalerCropRegion->top;
1977    }
1978    if (roi_x_max > crop_x_max) {
1979        roi_x_max = crop_x_max;
1980    }
1981    if (roi_y_max > crop_y_max) {
1982        roi_y_max = crop_y_max;
1983    }
1984    roi->rect.width = roi_x_max - roi->rect.left;
1985    roi->rect.height = roi_y_max - roi->rect.top;
1986    return true;
1987}
1988
1989/*===========================================================================
1990 * FUNCTION   : convertLandmarks
1991 *
1992 * DESCRIPTION: helper method to extract the landmarks from face detection info
1993 *
1994 * PARAMETERS :
1995 *   @face   : cam_rect_t struct to convert
1996 *   @landmarks : int32_t destination array
1997 *
1998 *
1999 *==========================================================================*/
2000void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2001{
2002    landmarks[0] = face.left_eye_center.x;
2003    landmarks[1] = face.left_eye_center.y;
2004    landmarks[2] = face.right_eye_center.y;
2005    landmarks[3] = face.right_eye_center.y;
2006    landmarks[4] = face.mouth_center.x;
2007    landmarks[5] = face.mouth_center.y;
2008}
2009
2010#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2011/*===========================================================================
2012 * FUNCTION   : initCapabilities
2013 *
2014 * DESCRIPTION: initialize camera capabilities in static data struct
2015 *
2016 * PARAMETERS :
2017 *   @cameraId  : camera Id
2018 *
2019 * RETURN     : int32_t type of status
2020 *              NO_ERROR  -- success
2021 *              none-zero failure code
2022 *==========================================================================*/
2023int QCamera3HardwareInterface::initCapabilities(int cameraId)
2024{
2025    int rc = 0;
2026    mm_camera_vtbl_t *cameraHandle = NULL;
2027    QCamera3HeapMemory *capabilityHeap = NULL;
2028
2029    cameraHandle = camera_open(cameraId);
2030    if (!cameraHandle) {
2031        ALOGE("%s: camera_open failed", __func__);
2032        rc = -1;
2033        goto open_failed;
2034    }
2035
2036    capabilityHeap = new QCamera3HeapMemory();
2037    if (capabilityHeap == NULL) {
2038        ALOGE("%s: creation of capabilityHeap failed", __func__);
2039        goto heap_creation_failed;
2040    }
2041    /* Allocate memory for capability buffer */
2042    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2043    if(rc != OK) {
2044        ALOGE("%s: No memory for cappability", __func__);
2045        goto allocate_failed;
2046    }
2047
2048    /* Map memory for capability buffer */
2049    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2050    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2051                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2052                                capabilityHeap->getFd(0),
2053                                sizeof(cam_capability_t));
2054    if(rc < 0) {
2055        ALOGE("%s: failed to map capability buffer", __func__);
2056        goto map_failed;
2057    }
2058
2059    /* Query Capability */
2060    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2061    if(rc < 0) {
2062        ALOGE("%s: failed to query capability",__func__);
2063        goto query_failed;
2064    }
2065    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2066    if (!gCamCapability[cameraId]) {
2067        ALOGE("%s: out of memory", __func__);
2068        goto query_failed;
2069    }
2070    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2071                                        sizeof(cam_capability_t));
2072    rc = 0;
2073
2074query_failed:
2075    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2076                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2077map_failed:
2078    capabilityHeap->deallocate();
2079allocate_failed:
2080    delete capabilityHeap;
2081heap_creation_failed:
2082    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2083    cameraHandle = NULL;
2084open_failed:
2085    return rc;
2086}
2087
2088/*===========================================================================
2089 * FUNCTION   : initParameters
2090 *
2091 * DESCRIPTION: initialize camera parameters
2092 *
2093 * PARAMETERS :
2094 *
2095 * RETURN     : int32_t type of status
2096 *              NO_ERROR  -- success
2097 *              none-zero failure code
2098 *==========================================================================*/
2099int QCamera3HardwareInterface::initParameters()
2100{
2101    int rc = 0;
2102
2103    //Allocate Set Param Buffer
2104    mParamHeap = new QCamera3HeapMemory();
2105    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2106    if(rc != OK) {
2107        rc = NO_MEMORY;
2108        ALOGE("Failed to allocate SETPARM Heap memory");
2109        delete mParamHeap;
2110        mParamHeap = NULL;
2111        return rc;
2112    }
2113
2114    //Map memory for parameters buffer
2115    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2116            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2117            mParamHeap->getFd(0),
2118            sizeof(parm_buffer_t));
2119    if(rc < 0) {
2120        ALOGE("%s:failed to map SETPARM buffer",__func__);
2121        rc = FAILED_TRANSACTION;
2122        mParamHeap->deallocate();
2123        delete mParamHeap;
2124        mParamHeap = NULL;
2125        return rc;
2126    }
2127
2128    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2129    return rc;
2130}
2131
2132/*===========================================================================
2133 * FUNCTION   : deinitParameters
2134 *
2135 * DESCRIPTION: de-initialize camera parameters
2136 *
2137 * PARAMETERS :
2138 *
2139 * RETURN     : NONE
2140 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Undo initParameters() in reverse order: unmap the buffer from the
    // backend before releasing the heap that backs it.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap; clear it so no stale pointer
    // survives the heap teardown.
    mParameters = NULL;
}
2152
2153/*===========================================================================
2154 * FUNCTION   : calcMaxJpegSize
2155 *
2156 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2157 *
2158 * PARAMETERS :
2159 *
2160 * RETURN     : max_jpeg_size
2161 *==========================================================================*/
2162int QCamera3HardwareInterface::calcMaxJpegSize()
2163{
2164    int32_t max_jpeg_size = 0;
2165    int temp_width, temp_height;
2166    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2167        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2168        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2169        if (temp_width * temp_height > max_jpeg_size ) {
2170            max_jpeg_size = temp_width * temp_height;
2171        }
2172    }
2173    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2174    return max_jpeg_size;
2175}
2176
2177/*===========================================================================
2178 * FUNCTION   : initStaticMetadata
2179 *
2180 * DESCRIPTION: initialize the static metadata
2181 *
2182 * PARAMETERS :
2183 *   @cameraId  : camera Id
2184 *
2185 * RETURN     : int32_t type of status
2186 *              0  -- success
2187 *              non-zero failure code
2188 *==========================================================================*/
2189int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2190{
2191    int rc = 0;
2192    CameraMetadata staticInfo;
2193
2194    /* android.info: hardware level */
2195    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2196    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2197        &supportedHardwareLevel, 1);
2198
2199    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2200    /*HAL 3 only*/
2201    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2202                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2203
2204    /*hard coded for now but this should come from sensor*/
2205    float min_focus_distance;
2206    if(facingBack){
2207        min_focus_distance = 10;
2208    } else {
2209        min_focus_distance = 0;
2210    }
2211    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2212                    &min_focus_distance, 1);
2213
2214    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2215                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2216
2217    /*should be using focal lengths but sensor doesn't provide that info now*/
2218    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2219                      &gCamCapability[cameraId]->focal_length,
2220                      1);
2221
2222    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2223                      gCamCapability[cameraId]->apertures,
2224                      gCamCapability[cameraId]->apertures_count);
2225
2226    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2227                gCamCapability[cameraId]->filter_densities,
2228                gCamCapability[cameraId]->filter_densities_count);
2229
2230
2231    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2232                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2233                      gCamCapability[cameraId]->optical_stab_modes_count);
2234
2235    staticInfo.update(ANDROID_LENS_POSITION,
2236                      gCamCapability[cameraId]->lens_position,
2237                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2238
2239    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2240                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2241    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2242                      lens_shading_map_size,
2243                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2244
2245    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2246                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2247    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2248            geo_correction_map_size,
2249            sizeof(geo_correction_map_size)/sizeof(int32_t));
2250
2251    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2252                       gCamCapability[cameraId]->geo_correction_map,
2253                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2254
2255    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2256            gCamCapability[cameraId]->sensor_physical_size, 2);
2257
2258    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2259            gCamCapability[cameraId]->exposure_time_range, 2);
2260
2261    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2262            &gCamCapability[cameraId]->max_frame_duration, 1);
2263
2264    camera_metadata_rational baseGainFactor = {
2265            gCamCapability[cameraId]->base_gain_factor.numerator,
2266            gCamCapability[cameraId]->base_gain_factor.denominator};
2267    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
2268                      &baseGainFactor, 1);
2269
2270    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2271                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2272
2273    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2274                                               gCamCapability[cameraId]->pixel_array_size.height};
2275    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2276                      pixel_array_size, 2);
2277
2278    int32_t active_array_size[] = {0, 0,
2279                                                gCamCapability[cameraId]->active_array_size.width,
2280                                                gCamCapability[cameraId]->active_array_size.height};
2281    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2282                      active_array_size, 4);
2283
2284    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2285            &gCamCapability[cameraId]->white_level, 1);
2286
2287    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2288            gCamCapability[cameraId]->black_level_pattern, 4);
2289
2290    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2291                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2292
2293    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2294                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2295
2296    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2297                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2298
2299    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2300                      &gCamCapability[cameraId]->histogram_size, 1);
2301
2302    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2303            &gCamCapability[cameraId]->max_histogram_count, 1);
2304
2305    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2306                                                gCamCapability[cameraId]->sharpness_map_size.height};
2307
2308    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2309            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2310
2311    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2312            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2313
2314
2315    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2316                      &gCamCapability[cameraId]->raw_min_duration,
2317                       1);
2318
2319    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2320                                                HAL_PIXEL_FORMAT_BLOB};
2321    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2322    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2323                      scalar_formats,
2324                      scalar_formats_count);
2325
2326    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2327    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2328              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2329              available_processed_sizes);
2330    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2331                available_processed_sizes,
2332                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2333
2334    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2335                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2336                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2337
2338    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2339    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2340                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2341                 available_fps_ranges);
2342    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2343            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2344
2345    camera_metadata_rational exposureCompensationStep = {
2346            gCamCapability[cameraId]->exp_compensation_step.numerator,
2347            gCamCapability[cameraId]->exp_compensation_step.denominator};
2348    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2349                      &exposureCompensationStep, 1);
2350
2351    /*TO DO*/
2352    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2353    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2354                      availableVstabModes, sizeof(availableVstabModes));
2355
2356    /*HAL 1 and HAL 3 common*/
2357    float maxZoom = 4;
2358    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2359            &maxZoom, 1);
2360
2361    int32_t max3aRegions = 1;
2362    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2363            &max3aRegions, 1);
2364
2365    uint8_t availableFaceDetectModes[] = {
2366            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2367            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2368    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2369                      availableFaceDetectModes,
2370                      sizeof(availableFaceDetectModes));
2371
2372    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2373                                                        gCamCapability[cameraId]->exposure_compensation_max};
2374    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2375            exposureCompensationRange,
2376            sizeof(exposureCompensationRange)/sizeof(int32_t));
2377
2378    uint8_t lensFacing = (facingBack) ?
2379            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2380    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2381
2382    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2383                available_processed_sizes,
2384                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2385
2386    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2387                      available_thumbnail_sizes,
2388                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2389
2390    int32_t max_jpeg_size = 0;
2391    int temp_width, temp_height;
2392    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2393        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2394        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2395        if (temp_width * temp_height > max_jpeg_size ) {
2396            max_jpeg_size = temp_width * temp_height;
2397        }
2398    }
2399    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2400    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2401                      &max_jpeg_size, 1);
2402
2403    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2404    int32_t size = 0;
2405    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2406        int val = lookupFwkName(EFFECT_MODES_MAP,
2407                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2408                                   gCamCapability[cameraId]->supported_effects[i]);
2409        if (val != NAME_NOT_FOUND) {
2410            avail_effects[size] = (uint8_t)val;
2411            size++;
2412        }
2413    }
2414    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2415                      avail_effects,
2416                      size);
2417
2418    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2419    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2420    int32_t supported_scene_modes_cnt = 0;
2421    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2422        int val = lookupFwkName(SCENE_MODES_MAP,
2423                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2424                                gCamCapability[cameraId]->supported_scene_modes[i]);
2425        if (val != NAME_NOT_FOUND) {
2426            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2427            supported_indexes[supported_scene_modes_cnt] = i;
2428            supported_scene_modes_cnt++;
2429        }
2430    }
2431
2432    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2433                      avail_scene_modes,
2434                      supported_scene_modes_cnt);
2435
2436    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2437    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2438                      supported_scene_modes_cnt,
2439                      scene_mode_overrides,
2440                      supported_indexes,
2441                      cameraId);
2442    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2443                      scene_mode_overrides,
2444                      supported_scene_modes_cnt*3);
2445
2446    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2447    size = 0;
2448    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2449        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2450                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2451                                 gCamCapability[cameraId]->supported_antibandings[i]);
2452        if (val != NAME_NOT_FOUND) {
2453            avail_antibanding_modes[size] = (uint8_t)val;
2454            size++;
2455        }
2456
2457    }
2458    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2459                      avail_antibanding_modes,
2460                      size);
2461
2462    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2463    size = 0;
2464    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2465        int val = lookupFwkName(FOCUS_MODES_MAP,
2466                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2467                                gCamCapability[cameraId]->supported_focus_modes[i]);
2468        if (val != NAME_NOT_FOUND) {
2469            avail_af_modes[size] = (uint8_t)val;
2470            size++;
2471        }
2472    }
2473    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2474                      avail_af_modes,
2475                      size);
2476
2477    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2478    size = 0;
2479    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2480        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2481                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2482                                    gCamCapability[cameraId]->supported_white_balances[i]);
2483        if (val != NAME_NOT_FOUND) {
2484            avail_awb_modes[size] = (uint8_t)val;
2485            size++;
2486        }
2487    }
2488    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2489                      avail_awb_modes,
2490                      size);
2491
2492    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2493    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2494      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2495
2496    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2497            available_flash_levels,
2498            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2499
2500
2501    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2502    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2503            &flashAvailable, 1);
2504
2505    uint8_t avail_ae_modes[5];
2506    size = 0;
2507    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2508        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2509        size++;
2510    }
2511    if (flashAvailable) {
2512        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2513        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2514        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2515    }
2516    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2517                      avail_ae_modes,
2518                      size);
2519
2520    int32_t sensitivity_range[2];
2521    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2522    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2523    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2524                      sensitivity_range,
2525                      sizeof(sensitivity_range) / sizeof(int32_t));
2526
2527    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2528                      &gCamCapability[cameraId]->max_analog_sensitivity,
2529                      1);
2530
2531    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2532                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2533                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2534
2535    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2536    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2537                      &sensor_orientation,
2538                      1);
2539
2540    int32_t max_output_streams[3] = {1, 3, 1};
2541    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2542                      max_output_streams,
2543                      3);
2544
2545    gStaticMetadata[cameraId] = staticInfo.release();
2546    return rc;
2547}
2548
2549/*===========================================================================
2550 * FUNCTION   : makeTable
2551 *
2552 * DESCRIPTION: make a table of sizes
2553 *
2554 * PARAMETERS :
2555 *
2556 *
2557 *==========================================================================*/
2558void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2559                                          int32_t* sizeTable)
2560{
2561    int j = 0;
2562    for (int i = 0; i < size; i++) {
2563        sizeTable[j] = dimTable[i].width;
2564        sizeTable[j+1] = dimTable[i].height;
2565        j+=2;
2566    }
2567}
2568
2569/*===========================================================================
2570 * FUNCTION   : makeFPSTable
2571 *
2572 * DESCRIPTION: make a table of fps ranges
2573 *
2574 * PARAMETERS :
2575 *
2576 *==========================================================================*/
2577void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2578                                          int32_t* fpsRangesTable)
2579{
2580    int j = 0;
2581    for (int i = 0; i < size; i++) {
2582        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2583        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2584        j+=2;
2585    }
2586}
2587
2588/*===========================================================================
2589 * FUNCTION   : makeOverridesList
2590 *
2591 * DESCRIPTION: make a list of scene mode overrides
2592 *
2593 * PARAMETERS :
2594 *
2595 *
2596 *==========================================================================*/
2597void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2598                                                  uint8_t size, uint8_t* overridesList,
2599                                                  uint8_t* supported_indexes,
2600                                                  int camera_id)
2601{
2602    /*daemon will give a list of overrides for all scene modes.
2603      However we should send the fwk only the overrides for the scene modes
2604      supported by the framework*/
2605    int j = 0, index = 0, supt = 0;
2606    uint8_t focus_override;
2607    for (int i = 0; i < size; i++) {
2608        supt = 0;
2609        index = supported_indexes[i];
2610        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2611        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2612                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2613                                                    overridesTable[index].awb_mode);
2614        focus_override = (uint8_t)overridesTable[index].af_mode;
2615        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2616           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2617              supt = 1;
2618              break;
2619           }
2620        }
2621        if (supt) {
2622           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2623                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2624                                              focus_override);
2625        } else {
2626           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2627        }
2628        j+=3;
2629    }
2630}
2631
2632/*===========================================================================
2633 * FUNCTION   : getPreviewHalPixelFormat
2634 *
2635 * DESCRIPTION: convert the format to type recognized by framework
2636 *
2637 * PARAMETERS : format : the format from backend
2638 *
2639 ** RETURN    : format recognized by framework
2640 *
2641 *==========================================================================*/
2642int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2643{
2644    int32_t halPixelFormat;
2645
2646    switch (format) {
2647    case CAM_FORMAT_YUV_420_NV12:
2648        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2649        break;
2650    case CAM_FORMAT_YUV_420_NV21:
2651        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2652        break;
2653    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2654        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2655        break;
2656    case CAM_FORMAT_YUV_420_YV12:
2657        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2658        break;
2659    case CAM_FORMAT_YUV_422_NV16:
2660    case CAM_FORMAT_YUV_422_NV61:
2661    default:
2662        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2663        break;
2664    }
2665    return halPixelFormat;
2666}
2667
2668/*===========================================================================
2669 * FUNCTION   : getSensorSensitivity
2670 *
2671 * DESCRIPTION: convert iso_mode to an integer value
2672 *
2673 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2674 *
2675 ** RETURN    : sensitivity supported by sensor
2676 *
2677 *==========================================================================*/
2678int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2679{
2680    int32_t sensitivity;
2681
2682    switch (iso_mode) {
2683    case CAM_ISO_MODE_100:
2684        sensitivity = 100;
2685        break;
2686    case CAM_ISO_MODE_200:
2687        sensitivity = 200;
2688        break;
2689    case CAM_ISO_MODE_400:
2690        sensitivity = 400;
2691        break;
2692    case CAM_ISO_MODE_800:
2693        sensitivity = 800;
2694        break;
2695    case CAM_ISO_MODE_1600:
2696        sensitivity = 1600;
2697        break;
2698    default:
2699        sensitivity = -1;
2700        break;
2701    }
2702    return sensitivity;
2703}
2704
2705
/*===========================================================================
 * FUNCTION   : AddSetParmEntryToBatch
 *
 * DESCRIPTION: add set parameter entry into batch. The batch buffer holds
 *              one slot per cam_intf_parm_type_t; slots in use are chained
 *              through a singly linked list of indexes (kept sorted by
 *              parameter type) so the backend can walk only the set entries.
 *
 * PARAMETERS :
 *   @p_table     : ptr to parameter buffer
 *   @paramType   : parameter type (also the slot index and list position)
 *   @paramLength : length of parameter value
 *   @paramValue  : ptr to parameter value
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              BAD_VALUE -- paramLength exceeds the per-entry capacity
 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    // The parameter type doubles as the node's position in the sorted list.
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
        // Entry is already the list head; links are correct as-is.
    } else if (position < current){
        // New smallest id: push in front of the old head.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            // Splice the new node between current and its successor.
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Reject payloads larger than a slot before touching the buffer.
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2762
2763/*===========================================================================
2764 * FUNCTION   : lookupFwkName
2765 *
2766 * DESCRIPTION: In case the enum is not same in fwk and backend
2767 *              make sure the parameter is correctly propogated
2768 *
2769 * PARAMETERS  :
2770 *   @arr      : map between the two enums
2771 *   @len      : len of the map
2772 *   @hal_name : name of the hal_parm to map
2773 *
2774 * RETURN     : int type of status
2775 *              fwk_name  -- success
2776 *              none-zero failure code
2777 *==========================================================================*/
2778int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2779                                             int len, int hal_name)
2780{
2781
2782    for (int i = 0; i < len; i++) {
2783        if (arr[i].hal_name == hal_name)
2784            return arr[i].fwk_name;
2785    }
2786
2787    /* Not able to find matching framework type is not necessarily
2788     * an error case. This happens when mm-camera supports more attributes
2789     * than the frameworks do */
2790    ALOGD("%s: Cannot find matching framework type", __func__);
2791    return NAME_NOT_FOUND;
2792}
2793
2794/*===========================================================================
2795 * FUNCTION   : lookupHalName
2796 *
2797 * DESCRIPTION: In case the enum is not same in fwk and backend
2798 *              make sure the parameter is correctly propogated
2799 *
2800 * PARAMETERS  :
2801 *   @arr      : map between the two enums
2802 *   @len      : len of the map
2803 *   @fwk_name : name of the hal_parm to map
2804 *
2805 * RETURN     : int32_t type of status
2806 *              hal_name  -- success
2807 *              none-zero failure code
2808 *==========================================================================*/
2809int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2810                                             int len, int fwk_name)
2811{
2812    for (int i = 0; i < len; i++) {
2813       if (arr[i].fwk_name == fwk_name)
2814           return arr[i].hal_name;
2815    }
2816    ALOGE("%s: Cannot find matching hal type", __func__);
2817    return NAME_NOT_FOUND;
2818}
2819
2820/*===========================================================================
2821 * FUNCTION   : getCapabilities
2822 *
2823 * DESCRIPTION: query camera capabilities
2824 *
2825 * PARAMETERS :
2826 *   @cameraId  : camera Id
2827 *   @info      : camera info struct to be filled in with camera capabilities
2828 *
2829 * RETURN     : int32_t type of status
2830 *              NO_ERROR  -- success
2831 *              none-zero failure code
2832 *==========================================================================*/
2833int QCamera3HardwareInterface::getCamInfo(int cameraId,
2834                                    struct camera_info *info)
2835{
2836    int rc = 0;
2837
2838    if (NULL == gCamCapability[cameraId]) {
2839        rc = initCapabilities(cameraId);
2840        if (rc < 0) {
2841            //pthread_mutex_unlock(&g_camlock);
2842            return rc;
2843        }
2844    }
2845
2846    if (NULL == gStaticMetadata[cameraId]) {
2847        rc = initStaticMetadata(cameraId);
2848        if (rc < 0) {
2849            return rc;
2850        }
2851    }
2852
2853    switch(gCamCapability[cameraId]->position) {
2854    case CAM_POSITION_BACK:
2855        info->facing = CAMERA_FACING_BACK;
2856        break;
2857
2858    case CAM_POSITION_FRONT:
2859        info->facing = CAMERA_FACING_FRONT;
2860        break;
2861
2862    default:
2863        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2864        rc = -1;
2865        break;
2866    }
2867
2868
2869    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2870    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2871    info->static_camera_characteristics = gStaticMetadata[cameraId];
2872
2873    return rc;
2874}
2875
2876/*===========================================================================
2877 * FUNCTION   : translateMetadata
2878 *
2879 * DESCRIPTION: translate the metadata into camera_metadata_t
2880 *
2881 * PARAMETERS : type of the request
2882 *
2883 *
2884 * RETURN     : success: camera_metadata_t*
2885 *              failure: NULL
2886 *
2887 *==========================================================================*/
2888camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2889{
2890    pthread_mutex_lock(&mMutex);
2891
2892    if (mDefaultMetadata[type] != NULL) {
2893        pthread_mutex_unlock(&mMutex);
2894        return mDefaultMetadata[type];
2895    }
2896    //first time we are handling this request
2897    //fill up the metadata structure using the wrapper class
2898    CameraMetadata settings;
2899    //translate from cam_capability_t to camera_metadata_tag_t
2900    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2901    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2902    int32_t defaultRequestID = 0;
2903    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
2904
2905    /*control*/
2906
2907    uint8_t controlIntent = 0;
2908    switch (type) {
2909      case CAMERA3_TEMPLATE_PREVIEW:
2910        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2911        break;
2912      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2913        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2914        break;
2915      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2916        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2917        break;
2918      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2919        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2920        break;
2921      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2922        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2923        break;
2924      default:
2925        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2926        break;
2927    }
2928    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2929
2930    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2931            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2932
2933    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2934    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2935
2936    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2937    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2938
2939    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2940    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2941
2942    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2943    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2944
2945    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2946    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2947
2948    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2949    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2950
2951    static uint8_t focusMode;
2952    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2953        ALOGE("%s: Setting focus mode to auto", __func__);
2954        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2955    } else {
2956        ALOGE("%s: Setting focus mode to off", __func__);
2957        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2958    }
2959    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2960
2961    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2962    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2963
2964    /*flash*/
2965    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2966    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2967
2968    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2969    settings.update(ANDROID_FLASH_FIRING_POWER,
2970            &flashFiringLevel, 1);
2971
2972    /* lens */
2973    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2974    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2975
2976    if (gCamCapability[mCameraId]->filter_densities_count) {
2977        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2978        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2979                        gCamCapability[mCameraId]->filter_densities_count);
2980    }
2981
2982    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2983    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2984
2985    /* Exposure time(Update the Min Exposure Time)*/
2986    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
2987    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
2988
2989    /* frame duration */
2990    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2991    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2992
2993    /* sensitivity */
2994    static const int32_t default_sensitivity = 100;
2995    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2996
2997    /*edge mode*/
2998    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2999    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
3000
3001    /*noise reduction mode*/
3002    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3003    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3004
3005    /*color correction mode*/
3006    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3007    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3008
3009    /*transform matrix mode*/
3010    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3011    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3012
3013    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3014    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3015
3016    mDefaultMetadata[type] = settings.release();
3017
3018    pthread_mutex_unlock(&mMutex);
3019    return mDefaultMetadata[type];
3020}
3021
3022/*===========================================================================
3023 * FUNCTION   : setFrameParameters
3024 *
3025 * DESCRIPTION: set parameters per frame as requested in the metadata from
3026 *              framework
3027 *
3028 * PARAMETERS :
3029 *   @request   : request that needs to be serviced
3030 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3031 *
3032 * RETURN     : success: NO_ERROR
 *              failure: a negative error code (e.g. BAD_VALUE)
3034 *==========================================================================*/
3035int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3036                    uint32_t streamTypeMask)
3037{
3038    /*translate from camera_metadata_t type to parm_type_t*/
3039    int rc = 0;
3040    if (request->settings == NULL && mFirstRequest) {
3041        /*settings cannot be null for the first request*/
3042        return BAD_VALUE;
3043    }
3044
3045    int32_t hal_version = CAM_HAL_V3;
3046
3047    memset(mParameters, 0, sizeof(parm_buffer_t));
3048    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3049    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3050                sizeof(hal_version), &hal_version);
3051    if (rc < 0) {
3052        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3053        return BAD_VALUE;
3054    }
3055
3056    /*we need to update the frame number in the parameters*/
3057    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3058                                sizeof(request->frame_number), &(request->frame_number));
3059    if (rc < 0) {
3060        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3061        return BAD_VALUE;
3062    }
3063
3064    /* Update stream id mask where buffers are requested */
3065    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3066                                sizeof(streamTypeMask), &streamTypeMask);
3067    if (rc < 0) {
3068        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3069        return BAD_VALUE;
3070    }
3071
3072    if(request->settings != NULL){
3073        rc = translateMetadataToParameters(request);
3074    }
3075    /*set the parameters to backend*/
3076    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3077    return rc;
3078}
3079
3080/*===========================================================================
3081 * FUNCTION   : translateMetadataToParameters
3082 *
3083 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3084 *
3085 *
3086 * PARAMETERS :
3087 *   @request  : request sent from framework
3088 *
3089 *
3090 * RETURN     : success: NO_ERROR
 *              failure: a negative error code
3092 *==========================================================================*/
3093int QCamera3HardwareInterface::translateMetadataToParameters
3094                                  (const camera3_capture_request_t *request)
3095{
3096    int rc = 0;
3097    CameraMetadata frame_settings;
3098    frame_settings = request->settings;
3099
3100    /* Do not change the order of the following list unless you know what you are
3101     * doing.
3102     * The order is laid out in such a way that parameters in the front of the table
3103     * may be used to override the parameters later in the table. Examples are:
3104     * 1. META_MODE should precede AEC/AWB/AF MODE
3105     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3106     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3107     * 4. Any mode should precede it's corresponding settings
3108     */
3109    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3110        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3111        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3112                sizeof(metaMode), &metaMode);
3113        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3114           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3115           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3116                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3117                                             fwk_sceneMode);
3118           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3119                sizeof(sceneMode), &sceneMode);
3120        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3121           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3122           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3123                sizeof(sceneMode), &sceneMode);
3124        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3125           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3126           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3127                sizeof(sceneMode), &sceneMode);
3128        }
3129    }
3130
3131    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3132        uint8_t fwk_aeMode =
3133            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3134        uint8_t aeMode;
3135        int32_t redeye;
3136
3137        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3138            aeMode = CAM_AE_MODE_OFF;
3139        } else {
3140            aeMode = CAM_AE_MODE_ON;
3141        }
3142        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3143            redeye = 1;
3144        } else {
3145            redeye = 0;
3146        }
3147
3148        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3149                                          sizeof(AE_FLASH_MODE_MAP),
3150                                          fwk_aeMode);
3151        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3152                sizeof(aeMode), &aeMode);
3153        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3154                sizeof(flashMode), &flashMode);
3155        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3156                sizeof(redeye), &redeye);
3157    }
3158
3159    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3160        uint8_t fwk_whiteLevel =
3161            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3162        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3163                sizeof(WHITE_BALANCE_MODES_MAP),
3164                fwk_whiteLevel);
3165        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3166                sizeof(whiteLevel), &whiteLevel);
3167    }
3168
3169    float focalDistance = -1.0;
3170    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3171        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3172        rc = AddSetParmEntryToBatch(mParameters,
3173                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3174                sizeof(focalDistance), &focalDistance);
3175    }
3176
3177    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3178        uint8_t fwk_focusMode =
3179            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3180        uint8_t focusMode;
3181        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3182            focusMode = CAM_FOCUS_MODE_INFINITY;
3183        } else{
3184         focusMode = lookupHalName(FOCUS_MODES_MAP,
3185                                   sizeof(FOCUS_MODES_MAP),
3186                                   fwk_focusMode);
3187        }
3188        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3189                sizeof(focusMode), &focusMode);
3190    }
3191
3192    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3193        int32_t antibandingMode =
3194            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3195        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3196                sizeof(antibandingMode), &antibandingMode);
3197    }
3198
3199    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3200        int32_t expCompensation = frame_settings.find(
3201            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3202        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3203            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3204        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3205            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3206        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3207          sizeof(expCompensation), &expCompensation);
3208    }
3209
3210    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3211        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3212        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3213                sizeof(aeLock), &aeLock);
3214    }
3215    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3216        cam_fps_range_t fps_range;
3217        fps_range.min_fps =
3218            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3219        fps_range.max_fps =
3220            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3221        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3222                sizeof(fps_range), &fps_range);
3223    }
3224
3225    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3226        uint8_t awbLock =
3227            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3228        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3229                sizeof(awbLock), &awbLock);
3230    }
3231
3232    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3233        uint8_t fwk_effectMode =
3234            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3235        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3236                sizeof(EFFECT_MODES_MAP),
3237                fwk_effectMode);
3238        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3239                sizeof(effectMode), &effectMode);
3240    }
3241
3242    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3243        uint8_t colorCorrectMode =
3244            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3245        rc =
3246            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3247                    sizeof(colorCorrectMode), &colorCorrectMode);
3248    }
3249
3250    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3251        cam_color_correct_gains_t colorCorrectGains;
3252        for (int i = 0; i < 4; i++) {
3253            colorCorrectGains.gains[i] =
3254                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3255        }
3256        rc =
3257            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3258                    sizeof(colorCorrectGains), &colorCorrectGains);
3259    }
3260
3261    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3262        cam_color_correct_matrix_t colorCorrectTransform;
3263        cam_rational_type_t transform_elem;
3264        int num = 0;
3265        for (int i = 0; i < 3; i++) {
3266           for (int j = 0; j < 3; j++) {
3267              transform_elem.numerator =
3268                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3269              transform_elem.denominator =
3270                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3271              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3272              num++;
3273           }
3274        }
3275        rc =
3276            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3277                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3278    }
3279
3280    cam_trigger_t aecTrigger;
3281    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3282    aecTrigger.trigger_id = -1;
3283    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3284        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3285        aecTrigger.trigger =
3286            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3287        aecTrigger.trigger_id =
3288            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3289    }
3290    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3291                                sizeof(aecTrigger), &aecTrigger);
3292
3293    /*af_trigger must come with a trigger id*/
3294    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3295        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3296        cam_trigger_t af_trigger;
3297        af_trigger.trigger =
3298            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3299        af_trigger.trigger_id =
3300            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3301        rc = AddSetParmEntryToBatch(mParameters,
3302                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3303    }
3304
3305    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3306        int32_t demosaic =
3307            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3308        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3309                sizeof(demosaic), &demosaic);
3310    }
3311
3312    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3313        cam_edge_application_t edge_application;
3314        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3315        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3316            edge_application.sharpness = 0;
3317        } else {
3318            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3319                int32_t edgeStrength =
3320                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3321                edge_application.sharpness = edgeStrength;
3322            } else {
3323                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3324            }
3325        }
3326        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3327                sizeof(edge_application), &edge_application);
3328    }
3329
3330    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3331        int32_t respectFlashMode = 1;
3332        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3333            uint8_t fwk_aeMode =
3334                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3335            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3336                respectFlashMode = 0;
3337                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3338                    __func__);
3339            }
3340        }
3341        if (respectFlashMode) {
3342            uint8_t flashMode =
3343                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3344            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3345                                          sizeof(FLASH_MODES_MAP),
3346                                          flashMode);
3347            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3348            // To check: CAM_INTF_META_FLASH_MODE usage
3349            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3350                          sizeof(flashMode), &flashMode);
3351        }
3352    }
3353
3354    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3355        uint8_t flashPower =
3356            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3357        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3358                sizeof(flashPower), &flashPower);
3359    }
3360
3361    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3362        int64_t flashFiringTime =
3363            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3364        rc = AddSetParmEntryToBatch(mParameters,
3365                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3366    }
3367
3368    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3369        uint8_t geometricMode =
3370            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3371        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3372                sizeof(geometricMode), &geometricMode);
3373    }
3374
3375    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3376        uint8_t geometricStrength =
3377            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3378        rc = AddSetParmEntryToBatch(mParameters,
3379                CAM_INTF_META_GEOMETRIC_STRENGTH,
3380                sizeof(geometricStrength), &geometricStrength);
3381    }
3382
3383    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3384        uint8_t hotPixelMode =
3385            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3386        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3387                sizeof(hotPixelMode), &hotPixelMode);
3388    }
3389
3390    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3391        float lensAperture =
3392            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3393        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3394                sizeof(lensAperture), &lensAperture);
3395    }
3396
3397    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3398        float filterDensity =
3399            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3400        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3401                sizeof(filterDensity), &filterDensity);
3402    }
3403
3404    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3405        float focalLength =
3406            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3407        rc = AddSetParmEntryToBatch(mParameters,
3408                CAM_INTF_META_LENS_FOCAL_LENGTH,
3409                sizeof(focalLength), &focalLength);
3410    }
3411
3412    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3413        uint8_t optStabMode =
3414            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3415        rc = AddSetParmEntryToBatch(mParameters,
3416                CAM_INTF_META_LENS_OPT_STAB_MODE,
3417                sizeof(optStabMode), &optStabMode);
3418    }
3419
3420    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3421        uint8_t noiseRedMode =
3422            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3423        rc = AddSetParmEntryToBatch(mParameters,
3424                CAM_INTF_META_NOISE_REDUCTION_MODE,
3425                sizeof(noiseRedMode), &noiseRedMode);
3426    }
3427
3428    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3429        uint8_t noiseRedStrength =
3430            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3431        rc = AddSetParmEntryToBatch(mParameters,
3432                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3433                sizeof(noiseRedStrength), &noiseRedStrength);
3434    }
3435
3436    cam_crop_region_t scalerCropRegion;
3437    bool scalerCropSet = false;
3438    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3439        scalerCropRegion.left =
3440            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3441        scalerCropRegion.top =
3442            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3443        scalerCropRegion.width =
3444            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3445        scalerCropRegion.height =
3446            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3447        rc = AddSetParmEntryToBatch(mParameters,
3448                CAM_INTF_META_SCALER_CROP_REGION,
3449                sizeof(scalerCropRegion), &scalerCropRegion);
3450        scalerCropSet = true;
3451    }
3452
3453    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3454        int64_t sensorExpTime =
3455            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3456        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3457        rc = AddSetParmEntryToBatch(mParameters,
3458                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3459                sizeof(sensorExpTime), &sensorExpTime);
3460    }
3461
3462    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3463        int64_t sensorFrameDuration =
3464            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3465        int64_t minFrameDuration = getMinFrameDuration(request);
3466        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3467        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3468            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3469        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3470        rc = AddSetParmEntryToBatch(mParameters,
3471                CAM_INTF_META_SENSOR_FRAME_DURATION,
3472                sizeof(sensorFrameDuration), &sensorFrameDuration);
3473    }
3474
3475    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3476        int32_t sensorSensitivity =
3477            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3478        if (sensorSensitivity <
3479                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3480            sensorSensitivity =
3481                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3482        if (sensorSensitivity >
3483                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3484            sensorSensitivity =
3485                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3486        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3487        rc = AddSetParmEntryToBatch(mParameters,
3488                CAM_INTF_META_SENSOR_SENSITIVITY,
3489                sizeof(sensorSensitivity), &sensorSensitivity);
3490    }
3491
3492    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3493        int32_t shadingMode =
3494            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3495        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3496                sizeof(shadingMode), &shadingMode);
3497    }
3498
3499    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3500        uint8_t shadingStrength =
3501            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3502        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3503                sizeof(shadingStrength), &shadingStrength);
3504    }
3505
3506    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3507        uint8_t fwk_facedetectMode =
3508            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3509        uint8_t facedetectMode =
3510            lookupHalName(FACEDETECT_MODES_MAP,
3511                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3512        rc = AddSetParmEntryToBatch(mParameters,
3513                CAM_INTF_META_STATS_FACEDETECT_MODE,
3514                sizeof(facedetectMode), &facedetectMode);
3515    }
3516
3517    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3518        uint8_t histogramMode =
3519            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3520        rc = AddSetParmEntryToBatch(mParameters,
3521                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3522                sizeof(histogramMode), &histogramMode);
3523    }
3524
3525    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3526        uint8_t sharpnessMapMode =
3527            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3528        rc = AddSetParmEntryToBatch(mParameters,
3529                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3530                sizeof(sharpnessMapMode), &sharpnessMapMode);
3531    }
3532
3533    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3534        uint8_t tonemapMode =
3535            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3536        rc = AddSetParmEntryToBatch(mParameters,
3537                CAM_INTF_META_TONEMAP_MODE,
3538                sizeof(tonemapMode), &tonemapMode);
3539    }
3540    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3541    /*All tonemap channels will have the same number of points*/
3542    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3543        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3544        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3545        cam_rgb_tonemap_curves tonemapCurves;
3546        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3547
3548        /* ch0 = G*/
3549        int point = 0;
3550        cam_tonemap_curve_t tonemapCurveGreen;
3551        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3552            for (int j = 0; j < 2; j++) {
3553               tonemapCurveGreen.tonemap_points[i][j] =
3554                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3555               point++;
3556            }
3557        }
3558        tonemapCurves.curves[0] = tonemapCurveGreen;
3559
3560        /* ch 1 = B */
3561        point = 0;
3562        cam_tonemap_curve_t tonemapCurveBlue;
3563        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3564            for (int j = 0; j < 2; j++) {
3565               tonemapCurveBlue.tonemap_points[i][j] =
3566                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3567               point++;
3568            }
3569        }
3570        tonemapCurves.curves[1] = tonemapCurveBlue;
3571
3572        /* ch 2 = R */
3573        point = 0;
3574        cam_tonemap_curve_t tonemapCurveRed;
3575        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3576            for (int j = 0; j < 2; j++) {
3577               tonemapCurveRed.tonemap_points[i][j] =
3578                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3579               point++;
3580            }
3581        }
3582        tonemapCurves.curves[2] = tonemapCurveRed;
3583
3584        rc = AddSetParmEntryToBatch(mParameters,
3585                CAM_INTF_META_TONEMAP_CURVES,
3586                sizeof(tonemapCurves), &tonemapCurves);
3587    }
3588
3589    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3590        uint8_t captureIntent =
3591            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3592        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3593                sizeof(captureIntent), &captureIntent);
3594    }
3595
3596    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3597        uint8_t blackLevelLock =
3598            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3599        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3600                sizeof(blackLevelLock), &blackLevelLock);
3601    }
3602
3603    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3604        uint8_t lensShadingMapMode =
3605            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3606        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3607                sizeof(lensShadingMapMode), &lensShadingMapMode);
3608    }
3609
3610    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3611        cam_area_t roi;
3612        bool reset = true;
3613        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3614        if (scalerCropSet) {
3615            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3616        }
3617        if (reset) {
3618            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3619                    sizeof(roi), &roi);
3620        }
3621    }
3622
3623    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3624        cam_area_t roi;
3625        bool reset = true;
3626        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3627        if (scalerCropSet) {
3628            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3629        }
3630        if (reset) {
3631            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3632                    sizeof(roi), &roi);
3633        }
3634    }
3635
3636    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3637        cam_area_t roi;
3638        bool reset = true;
3639        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3640        if (scalerCropSet) {
3641            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3642        }
3643        if (reset) {
3644            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3645                    sizeof(roi), &roi);
3646        }
3647    }
3648    return rc;
3649}
3650
3651/*===========================================================================
3652 * FUNCTION   : getJpegSettings
3653 *
3654 * DESCRIPTION: save the jpeg settings in the HAL
3655 *
3656 *
3657 * PARAMETERS :
3658 *   @settings  : frame settings information from framework
3659 *
3660 *
3661 * RETURN     : success: NO_ERROR
3662 *              failure:
3663 *==========================================================================*/
3664int QCamera3HardwareInterface::getJpegSettings
3665                                  (const camera_metadata_t *settings)
3666{
3667    if (mJpegSettings) {
3668        if (mJpegSettings->gps_timestamp) {
3669            free(mJpegSettings->gps_timestamp);
3670            mJpegSettings->gps_timestamp = NULL;
3671        }
3672        if (mJpegSettings->gps_coordinates) {
3673            for (int i = 0; i < 3; i++) {
3674                free(mJpegSettings->gps_coordinates[i]);
3675                mJpegSettings->gps_coordinates[i] = NULL;
3676            }
3677        }
3678        free(mJpegSettings);
3679        mJpegSettings = NULL;
3680    }
3681    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3682    CameraMetadata jpeg_settings;
3683    jpeg_settings = settings;
3684
3685    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3686        mJpegSettings->jpeg_orientation =
3687            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3688    } else {
3689        mJpegSettings->jpeg_orientation = 0;
3690    }
3691    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3692        mJpegSettings->jpeg_quality =
3693            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3694    } else {
3695        mJpegSettings->jpeg_quality = 85;
3696    }
3697    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3698        mJpegSettings->thumbnail_size.width =
3699            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3700        mJpegSettings->thumbnail_size.height =
3701            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3702    } else {
3703        mJpegSettings->thumbnail_size.width = 0;
3704        mJpegSettings->thumbnail_size.height = 0;
3705    }
3706    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3707        for (int i = 0; i < 3; i++) {
3708            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3709            *(mJpegSettings->gps_coordinates[i]) =
3710                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3711        }
3712    } else{
3713       for (int i = 0; i < 3; i++) {
3714            mJpegSettings->gps_coordinates[i] = NULL;
3715        }
3716    }
3717
3718    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3719        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3720        *(mJpegSettings->gps_timestamp) =
3721            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3722    } else {
3723        mJpegSettings->gps_timestamp = NULL;
3724    }
3725
3726    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3727        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3728        for (int i = 0; i < len; i++) {
3729            mJpegSettings->gps_processing_method[i] =
3730                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3731        }
3732        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3733            mJpegSettings->gps_processing_method[len] = '\0';
3734        }
3735    } else {
3736        mJpegSettings->gps_processing_method[0] = '\0';
3737    }
3738
3739    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3740        mJpegSettings->sensor_sensitivity =
3741            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3742    } else {
3743        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3744    }
3745
3746    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3747
3748    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3749        mJpegSettings->lens_focal_length =
3750            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3751    }
3752    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3753        mJpegSettings->exposure_compensation =
3754            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3755    }
3756    mJpegSettings->sharpness = 10; //default value
3757    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3758        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3759        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3760            mJpegSettings->sharpness = 0;
3761        }
3762    }
3763    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3764    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3765    mJpegSettings->is_jpeg_format = true;
3766    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3767    return 0;
3768}
3769
3770/*===========================================================================
3771 * FUNCTION   : captureResultCb
3772 *
3773 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3774 *
3775 * PARAMETERS :
3776 *   @frame  : frame information from mm-camera-interface
3777 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3778 *   @userdata: userdata
3779 *
3780 * RETURN     : NONE
3781 *==========================================================================*/
3782void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3783                camera3_stream_buffer_t *buffer,
3784                uint32_t frame_number, void *userdata)
3785{
3786    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3787    if (hw == NULL) {
3788        ALOGE("%s: Invalid hw %p", __func__, hw);
3789        return;
3790    }
3791
3792    hw->captureResultCb(metadata, buffer, frame_number);
3793    return;
3794}
3795
3796
3797/*===========================================================================
3798 * FUNCTION   : initialize
3799 *
3800 * DESCRIPTION: Pass framework callback pointers to HAL
3801 *
3802 * PARAMETERS :
3803 *
3804 *
3805 * RETURN     : Success : 0
3806 *              Failure: -ENODEV
3807 *==========================================================================*/
3808
3809int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3810                                  const camera3_callback_ops_t *callback_ops)
3811{
3812    ALOGV("%s: E", __func__);
3813    QCamera3HardwareInterface *hw =
3814        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3815    if (!hw) {
3816        ALOGE("%s: NULL camera device", __func__);
3817        return -ENODEV;
3818    }
3819
3820    int rc = hw->initialize(callback_ops);
3821    ALOGV("%s: X", __func__);
3822    return rc;
3823}
3824
3825/*===========================================================================
3826 * FUNCTION   : configure_streams
3827 *
3828 * DESCRIPTION:
3829 *
3830 * PARAMETERS :
3831 *
3832 *
3833 * RETURN     : Success: 0
3834 *              Failure: -EINVAL (if stream configuration is invalid)
3835 *                       -ENODEV (fatal error)
3836 *==========================================================================*/
3837
3838int QCamera3HardwareInterface::configure_streams(
3839        const struct camera3_device *device,
3840        camera3_stream_configuration_t *stream_list)
3841{
3842    ALOGV("%s: E", __func__);
3843    QCamera3HardwareInterface *hw =
3844        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3845    if (!hw) {
3846        ALOGE("%s: NULL camera device", __func__);
3847        return -ENODEV;
3848    }
3849    int rc = hw->configureStreams(stream_list);
3850    ALOGV("%s: X", __func__);
3851    return rc;
3852}
3853
3854/*===========================================================================
3855 * FUNCTION   : register_stream_buffers
3856 *
3857 * DESCRIPTION: Register stream buffers with the device
3858 *
3859 * PARAMETERS :
3860 *
3861 * RETURN     :
3862 *==========================================================================*/
3863int QCamera3HardwareInterface::register_stream_buffers(
3864        const struct camera3_device *device,
3865        const camera3_stream_buffer_set_t *buffer_set)
3866{
3867    ALOGV("%s: E", __func__);
3868    QCamera3HardwareInterface *hw =
3869        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3870    if (!hw) {
3871        ALOGE("%s: NULL camera device", __func__);
3872        return -ENODEV;
3873    }
3874    int rc = hw->registerStreamBuffers(buffer_set);
3875    ALOGV("%s: X", __func__);
3876    return rc;
3877}
3878
3879/*===========================================================================
3880 * FUNCTION   : construct_default_request_settings
3881 *
3882 * DESCRIPTION: Configure a settings buffer to meet the required use case
3883 *
3884 * PARAMETERS :
3885 *
3886 *
3887 * RETURN     : Success: Return valid metadata
3888 *              Failure: Return NULL
3889 *==========================================================================*/
3890const camera_metadata_t* QCamera3HardwareInterface::
3891    construct_default_request_settings(const struct camera3_device *device,
3892                                        int type)
3893{
3894
3895    ALOGV("%s: E", __func__);
3896    camera_metadata_t* fwk_metadata = NULL;
3897    QCamera3HardwareInterface *hw =
3898        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3899    if (!hw) {
3900        ALOGE("%s: NULL camera device", __func__);
3901        return NULL;
3902    }
3903
3904    fwk_metadata = hw->translateCapabilityToMetadata(type);
3905
3906    ALOGV("%s: X", __func__);
3907    return fwk_metadata;
3908}
3909
3910/*===========================================================================
3911 * FUNCTION   : process_capture_request
3912 *
3913 * DESCRIPTION:
3914 *
3915 * PARAMETERS :
3916 *
3917 *
3918 * RETURN     :
3919 *==========================================================================*/
3920int QCamera3HardwareInterface::process_capture_request(
3921                    const struct camera3_device *device,
3922                    camera3_capture_request_t *request)
3923{
3924    ALOGV("%s: E", __func__);
3925    QCamera3HardwareInterface *hw =
3926        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3927    if (!hw) {
3928        ALOGE("%s: NULL camera device", __func__);
3929        return -EINVAL;
3930    }
3931
3932    int rc = hw->processCaptureRequest(request);
3933    ALOGV("%s: X", __func__);
3934    return rc;
3935}
3936
3937/*===========================================================================
3938 * FUNCTION   : get_metadata_vendor_tag_ops
3939 *
3940 * DESCRIPTION:
3941 *
3942 * PARAMETERS :
3943 *
3944 *
3945 * RETURN     :
3946 *==========================================================================*/
3947
3948void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3949                const struct camera3_device *device,
3950                vendor_tag_query_ops_t* ops)
3951{
3952    ALOGV("%s: E", __func__);
3953    QCamera3HardwareInterface *hw =
3954        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3955    if (!hw) {
3956        ALOGE("%s: NULL camera device", __func__);
3957        return;
3958    }
3959
3960    hw->getMetadataVendorTagOps(ops);
3961    ALOGV("%s: X", __func__);
3962    return;
3963}
3964
3965/*===========================================================================
3966 * FUNCTION   : dump
3967 *
3968 * DESCRIPTION:
3969 *
3970 * PARAMETERS :
3971 *
3972 *
3973 * RETURN     :
3974 *==========================================================================*/
3975
3976void QCamera3HardwareInterface::dump(
3977                const struct camera3_device *device, int fd)
3978{
3979    ALOGV("%s: E", __func__);
3980    QCamera3HardwareInterface *hw =
3981        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3982    if (!hw) {
3983        ALOGE("%s: NULL camera device", __func__);
3984        return;
3985    }
3986
3987    hw->dump(fd);
3988    ALOGV("%s: X", __func__);
3989    return;
3990}
3991
3992/*===========================================================================
3993 * FUNCTION   : flush
3994 *
3995 * DESCRIPTION:
3996 *
3997 * PARAMETERS :
3998 *
3999 *
4000 * RETURN     :
4001 *==========================================================================*/
4002
4003int QCamera3HardwareInterface::flush(
4004                const struct camera3_device *device)
4005{
4006    int rc;
4007    ALOGV("%s: E", __func__);
4008    QCamera3HardwareInterface *hw =
4009        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4010    if (!hw) {
4011        ALOGE("%s: NULL camera device", __func__);
4012        return -EINVAL;
4013    }
4014
4015    rc = hw->flush();
4016    ALOGV("%s: X", __func__);
4017    return rc;
4018}
4019
4020/*===========================================================================
4021 * FUNCTION   : close_camera_device
4022 *
4023 * DESCRIPTION:
4024 *
4025 * PARAMETERS :
4026 *
4027 *
4028 * RETURN     :
4029 *==========================================================================*/
4030int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4031{
4032    ALOGV("%s: E", __func__);
4033    int ret = NO_ERROR;
4034    QCamera3HardwareInterface *hw =
4035        reinterpret_cast<QCamera3HardwareInterface *>(
4036            reinterpret_cast<camera3_device_t *>(device)->priv);
4037    if (!hw) {
4038        ALOGE("NULL camera device");
4039        return BAD_VALUE;
4040    }
4041    delete hw;
4042
4043    pthread_mutex_lock(&mCameraSessionLock);
4044    mCameraSessionActive = 0;
4045    pthread_mutex_unlock(&mCameraSessionLock);
4046    ALOGV("%s: X", __func__);
4047    return ret;
4048}
4049
4050/*===========================================================================
4051 * FUNCTION   : getWaveletDenoiseProcessPlate
4052 *
4053 * DESCRIPTION: query wavelet denoise process plate
4054 *
4055 * PARAMETERS : None
4056 *
4057 * RETURN     : WNR process plate value
4058 *==========================================================================*/
4059cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4060{
4061    char prop[PROPERTY_VALUE_MAX];
4062    memset(prop, 0, sizeof(prop));
4063    property_get("persist.denoise.process.plates", prop, "0");
4064    int processPlate = atoi(prop);
4065    switch(processPlate) {
4066    case 0:
4067        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4068    case 1:
4069        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4070    case 2:
4071        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4072    case 3:
4073        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4074    default:
4075        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4076    }
4077}
4078
4079/*===========================================================================
4080 * FUNCTION   : needRotationReprocess
4081 *
4082 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4083 *
4084 * PARAMETERS : none
4085 *
4086 * RETURN     : true: needed
4087 *              false: no need
4088 *==========================================================================*/
4089bool QCamera3HardwareInterface::needRotationReprocess()
4090{
4091
4092    if (!mJpegSettings->is_jpeg_format) {
4093        // RAW image, no need to reprocess
4094        return false;
4095    }
4096
4097    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4098        mJpegSettings->jpeg_orientation > 0) {
4099        // current rotation is not zero, and pp has the capability to process rotation
4100        ALOGD("%s: need do reprocess for rotation", __func__);
4101        return true;
4102    }
4103
4104    return false;
4105}
4106
4107/*===========================================================================
4108 * FUNCTION   : needReprocess
4109 *
4110 * DESCRIPTION: if reprocess in needed
4111 *
4112 * PARAMETERS : none
4113 *
4114 * RETURN     : true: needed
4115 *              false: no need
4116 *==========================================================================*/
4117bool QCamera3HardwareInterface::needReprocess()
4118{
4119    if (!mJpegSettings->is_jpeg_format) {
4120        // RAW image, no need to reprocess
4121        return false;
4122    }
4123
4124    if ((mJpegSettings->min_required_pp_mask > 0) ||
4125         isWNREnabled()) {
4126        // TODO: add for ZSL HDR later
4127        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4128        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4129        return true;
4130    }
4131    return needRotationReprocess();
4132}
4133
4134/*===========================================================================
4135 * FUNCTION   : addOnlineReprocChannel
4136 *
4137 * DESCRIPTION: add an online reprocess channel that will do reprocess on frames
4138 *              coming from input channel
4139 *
4140 * PARAMETERS :
4141 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4142 *
4143 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4144 *==========================================================================*/
4145QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4146              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4147{
4148    int32_t rc = NO_ERROR;
4149    QCamera3ReprocessChannel *pChannel = NULL;
4150    if (pInputChannel == NULL) {
4151        ALOGE("%s: input channel obj is NULL", __func__);
4152        return NULL;
4153    }
4154
4155    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4156            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4157    if (NULL == pChannel) {
4158        ALOGE("%s: no mem for reprocess channel", __func__);
4159        return NULL;
4160    }
4161
4162    // Capture channel, only need snapshot and postview streams start together
4163    mm_camera_channel_attr_t attr;
4164    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4165    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4166    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4167    rc = pChannel->initialize();
4168    if (rc != NO_ERROR) {
4169        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4170        delete pChannel;
4171        return NULL;
4172    }
4173
4174    // pp feature config
4175    cam_pp_feature_config_t pp_config;
4176    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4177    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4178        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4179        pp_config.sharpness = mJpegSettings->sharpness;
4180    }
4181
4182    if (isWNREnabled()) {
4183        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4184        pp_config.denoise2d.denoise_enable = 1;
4185        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4186    }
4187    if (needRotationReprocess()) {
4188        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4189        int rotation = mJpegSettings->jpeg_orientation;
4190        if (rotation == 0) {
4191            pp_config.rotation = ROTATE_0;
4192        } else if (rotation == 90) {
4193            pp_config.rotation = ROTATE_90;
4194        } else if (rotation == 180) {
4195            pp_config.rotation = ROTATE_180;
4196        } else if (rotation == 270) {
4197            pp_config.rotation = ROTATE_270;
4198        }
4199    }
4200
4201   rc = pChannel->addReprocStreamsFromSource(pp_config,
4202                                             pInputChannel,
4203                                             mMetadataChannel);
4204
4205    if (rc != NO_ERROR) {
4206        delete pChannel;
4207        return NULL;
4208    }
4209    return pChannel;
4210}
4211
/*===========================================================================
 * FUNCTION   : getMaxUnmatchedFramesInQueue
 *
 * DESCRIPTION: upper bound of unmatched frames allowed in a channel queue,
 *              taken from the camera capability's minimum pp buffer count
 *
 * PARAMETERS : none
 *
 * RETURN     : max number of unmatched frames
 *==========================================================================*/
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4216
/*===========================================================================
 * FUNCTION   : isWNREnabled
 *
 * DESCRIPTION: query whether wavelet noise reduction is supported, per the
 *              camera capability table
 *
 * PARAMETERS : none
 *
 * RETURN     : true if WNR is supported for this camera
 *==========================================================================*/
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4220
4221}; //end namespace qcamera
4222