QCamera3HWI.cpp revision d3e9508eb0f78c4911f51d779b4e7f2c4eb6fcc7
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
/* Classic function-like max macro; note both arguments may be evaluated
 * twice, so avoid side-effecting expressions at call sites. */
#define MAX(a, b) ((a) > (b) ? (a) : (b))

/* Shorthand: CPU-mapped pointer of buffer INDEX inside a QCamera3 memory
 * object. */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability table. NOTE(review): assumed to be populated by the
// module/bootstrap layer before any QCamera3HardwareInterface is constructed
// (the constructor dereferences it without a NULL check) -- confirm.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Last parameter batch applied to the backend.
parm_buffer_t *prevSettings;
// Cached static metadata handed to the framework, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

/* Serializes open/close across the process and enforces the
 * one-active-session policy tracked by mCameraSessionActive. */
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
// Non-zero while any camera session is open (guarded by mCameraSessionLock).
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
/* Translation tables: framework (ANDROID_*) enum value -> mm-camera backend
 * (CAM_*) enum value. Lookups presumably scan these arrays linearly. */

// Color effect modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Auto-white-balance modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Scene modes. Note STEADYPHOTO deliberately maps to the backend's
// ANTISHAKE mode (no 1:1 backend name).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Autofocus modes. NOTE(review): AF_MODE_OFF is mapped to the backend's
// FIXED focus mode -- presumably the closest backend equivalent; confirm.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Flicker-avoidance (antibanding) modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> flash behavior. Both AE OFF and plain AE ON translate to
// flash OFF; the REDEYE variant falls back to plain AUTO flash.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Manual flash control modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Face-detection modes (SIMPLE is not mapped here).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Flat list of supported JPEG thumbnail sizes as (width, height) pairs;
// the trailing (0, 0) entry advertises "no thumbnail".
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
/* camera3_device_ops vtable handed to the camera framework. Each entry
 * trampolines into the corresponding static member function, which recovers
 * the HAL instance from device->priv. (GNU designated-initializer syntax.) */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the camera3_device_t the framework will talk to; priv lets the
    // static trampolines in mCameraOps recover this instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced without a NULL
    // check -- assumed to be filled in before construction; confirm.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request/response bookkeeping used by process_capture_request.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL is optional; failure to load it is non-fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if ((*it)->registered && (*it)->buffer_set.buffers) {
240             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
241        }
242        if (channel)
243            delete channel;
244        free (*it);
245    }
246
247    mPictureChannel = NULL;
248
249    if (mJpegSettings != NULL) {
250        free(mJpegSettings);
251        mJpegSettings = NULL;
252    }
253
254    /* Clean up all channels */
255    if (mCameraInitialized) {
256        if (mMetadataChannel) {
257            mMetadataChannel->stop();
258            delete mMetadataChannel;
259            mMetadataChannel = NULL;
260        }
261        deinitParameters();
262    }
263
264    if (mCameraOpened)
265        closeCamera();
266
267    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
268        if (mDefaultMetadata[i])
269            free_camera_metadata(mDefaultMetadata[i]);
270
271    pthread_cond_destroy(&mRequestCond);
272
273    pthread_mutex_destroy(&mMutex);
274    ALOGV("%s: X", __func__);
275}
276
277/*===========================================================================
278 * FUNCTION   : openCamera
279 *
280 * DESCRIPTION: open camera
281 *
282 * PARAMETERS :
283 *   @hw_device  : double ptr for camera device struct
284 *
285 * RETURN     : int32_t type of status
286 *              NO_ERROR  -- success
287 *              none-zero failure code
288 *==========================================================================*/
289int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
290{
291    int rc = 0;
292    pthread_mutex_lock(&mCameraSessionLock);
293    if (mCameraSessionActive) {
294        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
295        pthread_mutex_unlock(&mCameraSessionLock);
296        return -EDQUOT;
297    }
298
299    if (mCameraOpened) {
300        *hw_device = NULL;
301        return PERMISSION_DENIED;
302    }
303
304    rc = openCamera();
305    if (rc == 0) {
306        *hw_device = &mCameraDevice.common;
307        mCameraSessionActive = 1;
308    } else
309        *hw_device = NULL;
310
311#ifdef HAS_MULTIMEDIA_HINTS
312    if (rc == 0) {
313        if (m_pPowerModule) {
314            if (m_pPowerModule->powerHint) {
315                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
316                        (void *)"state=1");
317            }
318        }
319    }
320#endif
321    pthread_mutex_unlock(&mCameraSessionLock);
322    return rc;
323}
324
325/*===========================================================================
326 * FUNCTION   : openCamera
327 *
328 * DESCRIPTION: open camera
329 *
330 * PARAMETERS : none
331 *
332 * RETURN     : int32_t type of status
333 *              NO_ERROR  -- success
334 *              none-zero failure code
335 *==========================================================================*/
336int QCamera3HardwareInterface::openCamera()
337{
338    if (mCameraHandle) {
339        ALOGE("Failure: Camera already opened");
340        return ALREADY_EXISTS;
341    }
342    mCameraHandle = camera_open(mCameraId);
343    if (!mCameraHandle) {
344        ALOGE("camera_open failed.");
345        return UNKNOWN_ERROR;
346    }
347
348    mCameraOpened = true;
349
350    return NO_ERROR;
351}
352
353/*===========================================================================
354 * FUNCTION   : closeCamera
355 *
356 * DESCRIPTION: close camera
357 *
358 * PARAMETERS : none
359 *
360 * RETURN     : int32_t type of status
361 *              NO_ERROR  -- success
362 *              none-zero failure code
363 *==========================================================================*/
364int QCamera3HardwareInterface::closeCamera()
365{
366    int rc = NO_ERROR;
367
368    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
369    mCameraHandle = NULL;
370    mCameraOpened = false;
371
372#ifdef HAS_MULTIMEDIA_HINTS
373    if (rc == NO_ERROR) {
374        if (m_pPowerModule) {
375            if (m_pPowerModule->powerHint) {
376                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
377                        (void *)"state=0");
378            }
379        }
380    }
381#endif
382
383    return rc;
384}
385
386/*===========================================================================
387 * FUNCTION   : initialize
388 *
389 * DESCRIPTION: Initialize frameworks callback functions
390 *
391 * PARAMETERS :
392 *   @callback_ops : callback function to frameworks
393 *
394 * RETURN     :
395 *
396 *==========================================================================*/
397int QCamera3HardwareInterface::initialize(
398        const struct camera3_callback_ops *callback_ops)
399{
400    int rc;
401
402    pthread_mutex_lock(&mMutex);
403
404    rc = initParameters();
405    if (rc < 0) {
406        ALOGE("%s: initParamters failed %d", __func__, rc);
407       goto err1;
408    }
409    mCallbackOps = callback_ops;
410
411    pthread_mutex_unlock(&mMutex);
412    mCameraInitialized = true;
413    return 0;
414
415err1:
416    pthread_mutex_unlock(&mMutex);
417    return rc;
418}
419
420/*===========================================================================
421 * FUNCTION   : configureStreams
422 *
423 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
424 *              and output streams.
425 *
426 * PARAMETERS :
427 *   @stream_list : streams to be configured
428 *
429 * RETURN     :
430 *
431 *==========================================================================*/
432int QCamera3HardwareInterface::configureStreams(
433        camera3_stream_configuration_t *streamList)
434{
435    int rc = 0;
436    mIsZslMode = false;
437
438    // Sanity check stream_list
439    if (streamList == NULL) {
440        ALOGE("%s: NULL stream configuration", __func__);
441        return BAD_VALUE;
442    }
443    if (streamList->streams == NULL) {
444        ALOGE("%s: NULL stream list", __func__);
445        return BAD_VALUE;
446    }
447
448    if (streamList->num_streams < 1) {
449        ALOGE("%s: Bad number of streams requested: %d", __func__,
450                streamList->num_streams);
451        return BAD_VALUE;
452    }
453
454    /* first invalidate all the steams in the mStreamList
455     * if they appear again, they will be validated */
456    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
457            it != mStreamInfo.end(); it++) {
458        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
459        channel->stop();
460        (*it)->status = INVALID;
461    }
462    if (mMetadataChannel) {
463        /* If content of mStreamInfo is not 0, there is metadata stream */
464        mMetadataChannel->stop();
465    }
466
467    pthread_mutex_lock(&mMutex);
468
469    camera3_stream_t *inputStream = NULL;
470    camera3_stream_t *jpegStream = NULL;
471    cam_stream_size_info_t stream_config_info;
472
473    for (size_t i = 0; i < streamList->num_streams; i++) {
474        camera3_stream_t *newStream = streamList->streams[i];
475        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
476                __func__, newStream->stream_type, newStream->format,
477                 newStream->width, newStream->height);
478        //if the stream is in the mStreamList validate it
479        bool stream_exists = false;
480        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
481                it != mStreamInfo.end(); it++) {
482            if ((*it)->stream == newStream) {
483                QCamera3Channel *channel =
484                    (QCamera3Channel*)(*it)->stream->priv;
485                stream_exists = true;
486                (*it)->status = RECONFIGURE;
487                /*delete the channel object associated with the stream because
488                  we need to reconfigure*/
489                delete channel;
490                (*it)->stream->priv = NULL;
491            }
492        }
493        if (!stream_exists) {
494            //new stream
495            stream_info_t* stream_info;
496            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
497            stream_info->stream = newStream;
498            stream_info->status = VALID;
499            stream_info->registered = 0;
500            mStreamInfo.push_back(stream_info);
501        }
502        if (newStream->stream_type == CAMERA3_STREAM_INPUT
503                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
504            if (inputStream != NULL) {
505                ALOGE("%s: Multiple input streams requested!", __func__);
506                pthread_mutex_unlock(&mMutex);
507                return BAD_VALUE;
508            }
509            inputStream = newStream;
510        }
511        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
512            jpegStream = newStream;
513        }
514    }
515    mInputStream = inputStream;
516
517    /*clean up invalid streams*/
518    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
519            it != mStreamInfo.end();) {
520        if(((*it)->status) == INVALID){
521            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
522            delete channel;
523            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
524            free(*it);
525            it = mStreamInfo.erase(it);
526        } else {
527            it++;
528        }
529    }
530    if (mMetadataChannel) {
531        delete mMetadataChannel;
532        mMetadataChannel = NULL;
533    }
534
535    //Create metadata channel and initialize it
536    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
537                    mCameraHandle->ops, captureResultCb,
538                    &gCamCapability[mCameraId]->padding_info, this);
539    if (mMetadataChannel == NULL) {
540        ALOGE("%s: failed to allocate metadata channel", __func__);
541        rc = -ENOMEM;
542        pthread_mutex_unlock(&mMutex);
543        return rc;
544    }
545    rc = mMetadataChannel->initialize();
546    if (rc < 0) {
547        ALOGE("%s: metadata channel initialization failed", __func__);
548        delete mMetadataChannel;
549        pthread_mutex_unlock(&mMutex);
550        return rc;
551    }
552
553    /* Allocate channel objects for the requested streams */
554    for (size_t i = 0; i < streamList->num_streams; i++) {
555        camera3_stream_t *newStream = streamList->streams[i];
556        uint32_t stream_usage = newStream->usage;
557        stream_config_info.stream_sizes[i].width = newStream->width;
558        stream_config_info.stream_sizes[i].height = newStream->height;
559        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
560            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
561            //for zsl stream the size is jpeg size
562            stream_config_info.stream_sizes[i].width = jpegStream->width;
563            stream_config_info.stream_sizes[i].height = jpegStream->height;
564            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
565        } else {
566           //for non zsl streams find out the format
567           switch (newStream->format) {
568           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
569              {
570                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
571                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
572                 } else {
573                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
574                 }
575              }
576              break;
577           case HAL_PIXEL_FORMAT_YCbCr_420_888:
578              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
579              break;
580           case HAL_PIXEL_FORMAT_BLOB:
581              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
582              break;
583           default:
584              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
585              break;
586           }
587        }
588        if (newStream->priv == NULL) {
589            //New stream, construct channel
590            switch (newStream->stream_type) {
591            case CAMERA3_STREAM_INPUT:
592                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
593                break;
594            case CAMERA3_STREAM_BIDIRECTIONAL:
595                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
596                    GRALLOC_USAGE_HW_CAMERA_WRITE;
597                break;
598            case CAMERA3_STREAM_OUTPUT:
599                /* For video encoding stream, set read/write rarely
600                 * flag so that they may be set to un-cached */
601                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
602                    newStream->usage =
603                         (GRALLOC_USAGE_SW_READ_RARELY |
604                         GRALLOC_USAGE_SW_WRITE_RARELY |
605                         GRALLOC_USAGE_HW_CAMERA_WRITE);
606                else
607                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
608                break;
609            default:
610                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
611                break;
612            }
613
614            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
615                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
616                QCamera3Channel *channel;
617                switch (newStream->format) {
618                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
619                case HAL_PIXEL_FORMAT_YCbCr_420_888:
620                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
621                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
622                        jpegStream) {
623                        uint32_t width = jpegStream->width;
624                        uint32_t height = jpegStream->height;
625                        mIsZslMode = true;
626                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
627                            mCameraHandle->ops, captureResultCb,
628                            &gCamCapability[mCameraId]->padding_info, this, newStream,
629                            width, height);
630                    } else
631                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
632                            mCameraHandle->ops, captureResultCb,
633                            &gCamCapability[mCameraId]->padding_info, this, newStream);
634                    if (channel == NULL) {
635                        ALOGE("%s: allocation of channel failed", __func__);
636                        pthread_mutex_unlock(&mMutex);
637                        return -ENOMEM;
638                    }
639
640                    newStream->priv = channel;
641                    break;
642                case HAL_PIXEL_FORMAT_BLOB:
643                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
644                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
645                            mCameraHandle->ops, captureResultCb,
646                            &gCamCapability[mCameraId]->padding_info, this, newStream);
647                    if (mPictureChannel == NULL) {
648                        ALOGE("%s: allocation of channel failed", __func__);
649                        pthread_mutex_unlock(&mMutex);
650                        return -ENOMEM;
651                    }
652                    newStream->priv = (QCamera3Channel*)mPictureChannel;
653                    break;
654
655                //TODO: Add support for app consumed format?
656                default:
657                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
658                    break;
659                }
660            }
661        } else {
662            // Channel already exists for this stream
663            // Do nothing for now
664        }
665    }
666    /*For the streams to be reconfigured we need to register the buffers
667      since the framework wont*/
668    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
669            it != mStreamInfo.end(); it++) {
670        if ((*it)->status == RECONFIGURE) {
671            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
672            /*only register buffers for streams that have already been
673              registered*/
674            if ((*it)->registered) {
675                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
676                        (*it)->buffer_set.buffers);
677                if (rc != NO_ERROR) {
678                    ALOGE("%s: Failed to register the buffers of old stream,\
679                            rc = %d", __func__, rc);
680                }
681                ALOGV("%s: channel %p has %d buffers",
682                        __func__, channel, (*it)->buffer_set.num_buffers);
683            }
684        }
685
686        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
687        if (index == NAME_NOT_FOUND) {
688            mPendingBuffersMap.add((*it)->stream, 0);
689        } else {
690            mPendingBuffersMap.editValueAt(index) = 0;
691        }
692    }
693
694    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
695    mPendingRequestsList.clear();
696
697    /*flush the metadata list*/
698    if (!mStoredMetadataList.empty()) {
699        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
700              m != mStoredMetadataList.end(); m++) {
701            mMetadataChannel->bufDone(m->meta_buf);
702            free(m->meta_buf);
703            m = mStoredMetadataList.erase(m);
704        }
705    }
706    int32_t hal_version = CAM_HAL_V3;
707    stream_config_info.num_streams = streamList->num_streams;
708
709    //settings/parameters don't carry over for new configureStreams
710    memset(mParameters, 0, sizeof(parm_buffer_t));
711
712    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
713    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
714                sizeof(hal_version), &hal_version);
715
716    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
717                sizeof(stream_config_info), &stream_config_info);
718
719    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
720
721    mFirstRequest = true;
722
723    //Get min frame duration for this streams configuration
724    deriveMinFrameDuration();
725
726    pthread_mutex_unlock(&mMutex);
727    return rc;
728}
729
730/*===========================================================================
731 * FUNCTION   : validateCaptureRequest
732 *
733 * DESCRIPTION: validate a capture request from camera service
734 *
735 * PARAMETERS :
736 *   @request : request from framework to process
737 *
738 * RETURN     :
739 *
740 *==========================================================================*/
741int QCamera3HardwareInterface::validateCaptureRequest(
742                    camera3_capture_request_t *request)
743{
744    ssize_t idx = 0;
745    const camera3_stream_buffer_t *b;
746    CameraMetadata meta;
747
748    /* Sanity check the request */
749    if (request == NULL) {
750        ALOGE("%s: NULL capture request", __func__);
751        return BAD_VALUE;
752    }
753
754    uint32_t frameNumber = request->frame_number;
755    if (request->input_buffer != NULL &&
756            request->input_buffer->stream != mInputStream) {
757        ALOGE("%s: Request %d: Input buffer not from input stream!",
758                __FUNCTION__, frameNumber);
759        return BAD_VALUE;
760    }
761    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
762        ALOGE("%s: Request %d: No output buffers provided!",
763                __FUNCTION__, frameNumber);
764        return BAD_VALUE;
765    }
766    if (request->input_buffer != NULL) {
767        b = request->input_buffer;
768        QCamera3Channel *channel =
769            static_cast<QCamera3Channel*>(b->stream->priv);
770        if (channel == NULL) {
771            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
772                    __func__, frameNumber, idx);
773            return BAD_VALUE;
774        }
775        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
776            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
777                    __func__, frameNumber, idx);
778            return BAD_VALUE;
779        }
780        if (b->release_fence != -1) {
781            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
782                    __func__, frameNumber, idx);
783            return BAD_VALUE;
784        }
785        if (b->buffer == NULL) {
786            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
787                    __func__, frameNumber, idx);
788            return BAD_VALUE;
789        }
790    }
791
792    // Validate all buffers
793    b = request->output_buffers;
794    do {
795        QCamera3Channel *channel =
796                static_cast<QCamera3Channel*>(b->stream->priv);
797        if (channel == NULL) {
798            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
799                    __func__, frameNumber, idx);
800            return BAD_VALUE;
801        }
802        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
803            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
804                    __func__, frameNumber, idx);
805            return BAD_VALUE;
806        }
807        if (b->release_fence != -1) {
808            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
809                    __func__, frameNumber, idx);
810            return BAD_VALUE;
811        }
812        if (b->buffer == NULL) {
813            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
814                    __func__, frameNumber, idx);
815            return BAD_VALUE;
816        }
817        idx++;
818        b = request->output_buffers + idx;
819    } while (idx < (ssize_t)request->num_output_buffers);
820
821    return NO_ERROR;
822}
823
824/*===========================================================================
825 * FUNCTION   : deriveMinFrameDuration
826 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
828 *              on currently configured streams.
829 *
830 * PARAMETERS : NONE
831 *
832 * RETURN     : NONE
833 *
834 *==========================================================================*/
835void QCamera3HardwareInterface::deriveMinFrameDuration()
836{
837    int32_t maxJpegDimension, maxProcessedDimension;
838
839    maxJpegDimension = 0;
840    maxProcessedDimension = 0;
841
842    // Figure out maximum jpeg, processed, and raw dimensions
843    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
844        it != mStreamInfo.end(); it++) {
845
846        // Input stream doesn't have valid stream_type
847        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
848            continue;
849
850        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
851        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
852            if (dimension > maxJpegDimension)
853                maxJpegDimension = dimension;
854        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
855            if (dimension > maxProcessedDimension)
856                maxProcessedDimension = dimension;
857        }
858    }
859
860    //Assume all jpeg dimensions are in processed dimensions.
861    if (maxJpegDimension > maxProcessedDimension)
862        maxProcessedDimension = maxJpegDimension;
863
864    //Find minimum durations for processed, jpeg, and raw
865    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
866    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
867        if (maxProcessedDimension ==
868            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
869            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
870            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
871            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
872            break;
873        }
874    }
875}
876
877/*===========================================================================
878 * FUNCTION   : getMinFrameDuration
879 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the framework
 *
 * RETURN     : min frame duration for a particular request
886 *
887 *==========================================================================*/
888int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
889{
890    bool hasJpegStream = false;
891    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
892        const camera3_stream_t *stream = request->output_buffers[i].stream;
893        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
894            hasJpegStream = true;
895    }
896
897    if (!hasJpegStream)
898        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
899    else
900        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
901}
902
903/*===========================================================================
904 * FUNCTION   : registerStreamBuffers
905 *
906 * DESCRIPTION: Register buffers for a given stream with the HAL device.
907 *
908 * PARAMETERS :
 *   @buffer_set : stream buffers to be registered with the HAL device
910 *
911 * RETURN     :
912 *
913 *==========================================================================*/
914int QCamera3HardwareInterface::registerStreamBuffers(
915        const camera3_stream_buffer_set_t *buffer_set)
916{
917    int rc = 0;
918
919    pthread_mutex_lock(&mMutex);
920
921    if (buffer_set == NULL) {
922        ALOGE("%s: Invalid buffer_set parameter.", __func__);
923        pthread_mutex_unlock(&mMutex);
924        return -EINVAL;
925    }
926    if (buffer_set->stream == NULL) {
927        ALOGE("%s: Invalid stream parameter.", __func__);
928        pthread_mutex_unlock(&mMutex);
929        return -EINVAL;
930    }
931    if (buffer_set->num_buffers < 1) {
932        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
933        pthread_mutex_unlock(&mMutex);
934        return -EINVAL;
935    }
936    if (buffer_set->buffers == NULL) {
937        ALOGE("%s: Invalid buffers parameter.", __func__);
938        pthread_mutex_unlock(&mMutex);
939        return -EINVAL;
940    }
941
942    camera3_stream_t *stream = buffer_set->stream;
943    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
944
945    //set the buffer_set in the mStreamInfo array
946    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
947            it != mStreamInfo.end(); it++) {
948        if ((*it)->stream == stream) {
949            uint32_t numBuffers = buffer_set->num_buffers;
950            (*it)->buffer_set.stream = buffer_set->stream;
951            (*it)->buffer_set.num_buffers = numBuffers;
952            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
953            if ((*it)->buffer_set.buffers == NULL) {
954                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
955                pthread_mutex_unlock(&mMutex);
956                return -ENOMEM;
957            }
958            for (size_t j = 0; j < numBuffers; j++){
959                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
960            }
961            (*it)->registered = 1;
962        }
963    }
964    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
965    if (rc < 0) {
966        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
967        pthread_mutex_unlock(&mMutex);
968        return -ENODEV;
969    }
970
971    pthread_mutex_unlock(&mMutex);
972    return NO_ERROR;
973}
974
975/*===========================================================================
976 * FUNCTION   : processCaptureRequest
977 *
978 * DESCRIPTION: process a capture request from camera service
979 *
980 * PARAMETERS :
981 *   @request : request from framework to process
982 *
983 * RETURN     :
984 *
985 *==========================================================================*/
986int QCamera3HardwareInterface::processCaptureRequest(
987                    camera3_capture_request_t *request)
988{
989    int rc = NO_ERROR;
990    int32_t request_id;
991    CameraMetadata meta;
992    MetadataBufferInfo reproc_meta;
993    int queueMetadata = 0;
994
995    pthread_mutex_lock(&mMutex);
996
997    rc = validateCaptureRequest(request);
998    if (rc != NO_ERROR) {
999        ALOGE("%s: incoming request is not valid", __func__);
1000        pthread_mutex_unlock(&mMutex);
1001        return rc;
1002    }
1003
1004    meta = request->settings;
1005
1006    // For first capture request, send capture intent, and
1007    // stream on all streams
1008    if (mFirstRequest) {
1009
1010        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1011            int32_t hal_version = CAM_HAL_V3;
1012            uint8_t captureIntent =
1013                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1014
1015            memset(mParameters, 0, sizeof(parm_buffer_t));
1016            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1017            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1018                sizeof(hal_version), &hal_version);
1019            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1020                sizeof(captureIntent), &captureIntent);
1021            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1022                mParameters);
1023        }
1024
1025        mMetadataChannel->start();
1026        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1027            it != mStreamInfo.end(); it++) {
1028            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1029            channel->start();
1030        }
1031    }
1032
1033    uint32_t frameNumber = request->frame_number;
1034    uint32_t streamTypeMask = 0;
1035
1036    if (meta.exists(ANDROID_REQUEST_ID)) {
1037        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1038        mCurrentRequestId = request_id;
1039        ALOGV("%s: Received request with id: %d",__func__, request_id);
1040    } else if (mFirstRequest || mCurrentRequestId == -1){
1041        ALOGE("%s: Unable to find request id field, \
1042                & no previous id available", __func__);
1043        return NAME_NOT_FOUND;
1044    } else {
1045        ALOGV("%s: Re-using old request id", __func__);
1046        request_id = mCurrentRequestId;
1047    }
1048
1049    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1050                                    __func__, __LINE__,
1051                                    request->num_output_buffers,
1052                                    request->input_buffer,
1053                                    frameNumber);
1054    // Acquire all request buffers first
1055    int blob_request = 0;
1056    for (size_t i = 0; i < request->num_output_buffers; i++) {
1057        const camera3_stream_buffer_t& output = request->output_buffers[i];
1058        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1059        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1060
1061        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1062        //Call function to store local copy of jpeg data for encode params.
1063            blob_request = 1;
1064            rc = getJpegSettings(request->settings);
1065            if (rc < 0) {
1066                ALOGE("%s: failed to get jpeg parameters", __func__);
1067                pthread_mutex_unlock(&mMutex);
1068                return rc;
1069            }
1070        }
1071
1072        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1073        if (rc != OK) {
1074            ALOGE("%s: fence wait failed %d", __func__, rc);
1075            pthread_mutex_unlock(&mMutex);
1076            return rc;
1077        }
1078        streamTypeMask |= channel->getStreamTypeMask();
1079    }
1080
1081    rc = setFrameParameters(request, streamTypeMask);
1082    if (rc < 0) {
1083        ALOGE("%s: fail to set frame parameters", __func__);
1084        pthread_mutex_unlock(&mMutex);
1085        return rc;
1086    }
1087
1088    /* Update pending request list and pending buffers map */
1089    PendingRequestInfo pendingRequest;
1090    pendingRequest.frame_number = frameNumber;
1091    pendingRequest.num_buffers = request->num_output_buffers;
1092    pendingRequest.request_id = request_id;
1093    pendingRequest.blob_request = blob_request;
1094    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1095
1096    for (size_t i = 0; i < request->num_output_buffers; i++) {
1097        RequestedBufferInfo requestedBuf;
1098        requestedBuf.stream = request->output_buffers[i].stream;
1099        requestedBuf.buffer = NULL;
1100        pendingRequest.buffers.push_back(requestedBuf);
1101
1102        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1103    }
1104    mPendingRequestsList.push_back(pendingRequest);
1105
1106    // Notify metadata channel we receive a request
1107    mMetadataChannel->request(NULL, frameNumber);
1108
1109    // Call request on other streams
1110    for (size_t i = 0; i < request->num_output_buffers; i++) {
1111        const camera3_stream_buffer_t& output = request->output_buffers[i];
1112        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1113        mm_camera_buf_def_t *pInputBuffer = NULL;
1114
1115        if (channel == NULL) {
1116            ALOGE("%s: invalid channel pointer for stream", __func__);
1117            continue;
1118        }
1119
1120        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1121            QCamera3RegularChannel* inputChannel = NULL;
1122            if(request->input_buffer != NULL){
1123                //Try to get the internal format
1124                inputChannel = (QCamera3RegularChannel*)
1125                    request->input_buffer->stream->priv;
1126                if(inputChannel == NULL ){
1127                    ALOGE("%s: failed to get input channel handle", __func__);
1128                } else {
1129                    pInputBuffer =
1130                        inputChannel->getInternalFormatBuffer(
1131                                request->input_buffer->buffer);
1132                    ALOGD("%s: Input buffer dump",__func__);
1133                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1134                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1135                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1136                    ALOGD("Handle:%p", request->input_buffer->buffer);
1137                    //TODO: need to get corresponding metadata and send it to pproc
1138                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1139                         m != mStoredMetadataList.end(); m++) {
1140                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1141                            reproc_meta.meta_buf = m->meta_buf;
1142                            queueMetadata = 1;
1143                            break;
1144                        }
1145                    }
1146                }
1147            }
1148            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1149                            pInputBuffer,(QCamera3Channel*)inputChannel);
1150            if (queueMetadata) {
1151                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1152            }
1153        } else {
1154            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1155                __LINE__, output.buffer, frameNumber);
1156            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1157                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1158                     m != mStoredMetadataList.end(); m++) {
1159                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1160                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1161                            mMetadataChannel->bufDone(m->meta_buf);
1162                            free(m->meta_buf);
1163                            m = mStoredMetadataList.erase(m);
1164                            break;
1165                        }
1166                   }
1167                }
1168            }
1169            rc = channel->request(output.buffer, frameNumber);
1170        }
1171        if (rc < 0)
1172            ALOGE("%s: request failed", __func__);
1173    }
1174
1175    mFirstRequest = false;
1176    // Added a timed condition wait
1177    struct timespec ts;
1178    uint8_t isValidTimeout = 1;
1179    rc = clock_gettime(CLOCK_REALTIME, &ts);
1180    if (rc < 0) {
1181        isValidTimeout = 0;
1182        ALOGE("%s: Error reading the real time clock!!", __func__);
1183    }
1184    else {
1185        // Make timeout as 5 sec for request to be honored
1186        ts.tv_sec += 5;
1187    }
1188    //Block on conditional variable
1189    mPendingRequest = 1;
1190    while (mPendingRequest == 1) {
1191        if (!isValidTimeout) {
1192            ALOGV("%s: Blocking on conditional wait", __func__);
1193            pthread_cond_wait(&mRequestCond, &mMutex);
1194        }
1195        else {
1196            ALOGV("%s: Blocking on timed conditional wait", __func__);
1197            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1198            if (rc == ETIMEDOUT) {
1199                rc = -ENODEV;
1200                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1201                break;
1202            }
1203        }
1204        ALOGV("%s: Unblocked", __func__);
1205    }
1206
1207    pthread_mutex_unlock(&mMutex);
1208    return rc;
1209}
1210
1211/*===========================================================================
1212 * FUNCTION   : getMetadataVendorTagOps
1213 *
1214 * DESCRIPTION:
1215 *
1216 * PARAMETERS :
1217 *
1218 *
1219 * RETURN     :
1220 *==========================================================================*/
1221void QCamera3HardwareInterface::getMetadataVendorTagOps(
1222                    vendor_tag_query_ops_t* /*ops*/)
1223{
1224    /* Enable locks when we eventually add Vendor Tags */
1225    /*
1226    pthread_mutex_lock(&mMutex);
1227
1228    pthread_mutex_unlock(&mMutex);
1229    */
1230    return;
1231}
1232
1233/*===========================================================================
1234 * FUNCTION   : dump
1235 *
1236 * DESCRIPTION:
1237 *
1238 * PARAMETERS :
1239 *
1240 *
1241 * RETURN     :
1242 *==========================================================================*/
1243void QCamera3HardwareInterface::dump(int /*fd*/)
1244{
1245    /*Enable lock when we implement this function*/
1246    /*
1247    pthread_mutex_lock(&mMutex);
1248
1249    pthread_mutex_unlock(&mMutex);
1250    */
1251    return;
1252}
1253
1254/*===========================================================================
1255 * FUNCTION   : flush
1256 *
1257 * DESCRIPTION:
1258 *
1259 * PARAMETERS :
1260 *
1261 *
1262 * RETURN     :
1263 *==========================================================================*/
1264int QCamera3HardwareInterface::flush()
1265{
1266    /*Enable lock when we implement this function*/
1267    /*
1268    pthread_mutex_lock(&mMutex);
1269
1270    pthread_mutex_unlock(&mMutex);
1271    */
1272    return 0;
1273}
1274
1275/*===========================================================================
1276 * FUNCTION   : captureResultCb
1277 *
1278 * DESCRIPTION: Callback handler for all capture result
1279 *              (streams, as well as metadata)
1280 *
1281 * PARAMETERS :
1282 *   @metadata : metadata information
1283 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1284 *               NULL if metadata.
1285 *
1286 * RETURN     : NONE
1287 *==========================================================================*/
1288void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1289                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1290{
1291    pthread_mutex_lock(&mMutex);
1292
1293    if (metadata_buf) {
1294        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1295        int32_t frame_number_valid = *(int32_t *)
1296            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1297        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1298            CAM_INTF_META_PENDING_REQUESTS, metadata);
1299        uint32_t frame_number = *(uint32_t *)
1300            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1301        const struct timeval *tv = (const struct timeval *)
1302            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1303        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1304            tv->tv_usec * NSEC_PER_USEC;
1305
1306        if (!frame_number_valid) {
1307            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1308            mMetadataChannel->bufDone(metadata_buf);
1309            free(metadata_buf);
1310            goto done_metadata;
1311        }
1312        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1313                frame_number, capture_time);
1314
1315        // Go through the pending requests info and send shutter/results to frameworks
1316        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1317                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1318            camera3_capture_result_t result;
1319            camera3_notify_msg_t notify_msg;
1320            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1321
1322            // Flush out all entries with less or equal frame numbers.
1323
1324            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1325            //Right now it's the same as metadata timestamp
1326
1327            //TODO: When there is metadata drop, how do we derive the timestamp of
1328            //dropped frames? For now, we fake the dropped timestamp by substracting
1329            //from the reported timestamp
1330            nsecs_t current_capture_time = capture_time -
1331                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1332
1333            // Send shutter notify to frameworks
1334            notify_msg.type = CAMERA3_MSG_SHUTTER;
1335            notify_msg.message.shutter.frame_number = i->frame_number;
1336            notify_msg.message.shutter.timestamp = current_capture_time;
1337            mCallbackOps->notify(mCallbackOps, &notify_msg);
1338            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1339                    i->frame_number, capture_time);
1340
1341            // Send empty metadata with already filled buffers for dropped metadata
1342            // and send valid metadata with already filled buffers for current metadata
1343            if (i->frame_number < frame_number) {
1344                CameraMetadata dummyMetadata;
1345                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1346                        &current_capture_time, 1);
1347                dummyMetadata.update(ANDROID_REQUEST_ID,
1348                        &(i->request_id), 1);
1349                result.result = dummyMetadata.release();
1350            } else {
1351                result.result = translateCbMetadataToResultMetadata(metadata,
1352                        current_capture_time, i->request_id);
1353                if (mIsZslMode) {
1354                   int found_metadata = 0;
1355                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1356                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1357                        j != i->buffers.end(); j++) {
1358                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1359                         //check if corresp. zsl already exists in the stored metadata list
1360                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1361                               m != mStoredMetadataList.begin(); m++) {
1362                            if (m->frame_number == frame_number) {
1363                               m->meta_buf = metadata_buf;
1364                               found_metadata = 1;
1365                               break;
1366                            }
1367                         }
1368                         if (!found_metadata) {
1369                            MetadataBufferInfo store_meta_info;
1370                            store_meta_info.meta_buf = metadata_buf;
1371                            store_meta_info.frame_number = frame_number;
1372                            mStoredMetadataList.push_back(store_meta_info);
1373                            found_metadata = 1;
1374                         }
1375                      }
1376                   }
1377                   if (!found_metadata) {
1378                       if (!i->input_buffer_present && i->blob_request) {
1379                          //livesnapshot or fallback non-zsl snapshot case
1380                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1381                                j != i->buffers.end(); j++){
1382                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1383                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1384                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1385                                 break;
1386                              }
1387                         }
1388                       } else {
1389                            //return the metadata immediately
1390                            mMetadataChannel->bufDone(metadata_buf);
1391                            free(metadata_buf);
1392                       }
1393                   }
1394               } else if (!mIsZslMode && i->blob_request) {
1395                   //If it is a blob request then send the metadata to the picture channel
1396                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1397               } else {
1398                   // Return metadata buffer
1399                   mMetadataChannel->bufDone(metadata_buf);
1400                   free(metadata_buf);
1401               }
1402
1403            }
1404            if (!result.result) {
1405                ALOGE("%s: metadata is NULL", __func__);
1406            }
1407            result.frame_number = i->frame_number;
1408            result.num_output_buffers = 0;
1409            result.output_buffers = NULL;
1410            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1411                    j != i->buffers.end(); j++) {
1412                if (j->buffer) {
1413                    result.num_output_buffers++;
1414                }
1415            }
1416
1417            if (result.num_output_buffers > 0) {
1418                camera3_stream_buffer_t *result_buffers =
1419                    new camera3_stream_buffer_t[result.num_output_buffers];
1420                if (!result_buffers) {
1421                    ALOGE("%s: Fatal error: out of memory", __func__);
1422                }
1423                size_t result_buffers_idx = 0;
1424                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1425                        j != i->buffers.end(); j++) {
1426                    if (j->buffer) {
1427                        result_buffers[result_buffers_idx++] = *(j->buffer);
1428                        free(j->buffer);
1429                        j->buffer = NULL;
1430                        mPendingBuffersMap.editValueFor(j->stream)--;
1431                    }
1432                }
1433                result.output_buffers = result_buffers;
1434
1435                mCallbackOps->process_capture_result(mCallbackOps, &result);
1436                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1437                        __func__, result.frame_number, current_capture_time);
1438                free_camera_metadata((camera_metadata_t *)result.result);
1439                delete[] result_buffers;
1440            } else {
1441                mCallbackOps->process_capture_result(mCallbackOps, &result);
1442                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1443                        __func__, result.frame_number, current_capture_time);
1444                free_camera_metadata((camera_metadata_t *)result.result);
1445            }
1446            // erase the element from the list
1447            i = mPendingRequestsList.erase(i);
1448        }
1449
1450
1451done_metadata:
1452        bool max_buffers_dequeued = false;
1453        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1454            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1455            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1456            if (queued_buffers == stream->max_buffers) {
1457                max_buffers_dequeued = true;
1458                break;
1459            }
1460        }
1461        if (!max_buffers_dequeued && !pending_requests) {
1462            // Unblock process_capture_request
1463            mPendingRequest = 0;
1464            pthread_cond_signal(&mRequestCond);
1465        }
1466    } else {
1467        // If the frame number doesn't exist in the pending request list,
1468        // directly send the buffer to the frameworks, and update pending buffers map
1469        // Otherwise, book-keep the buffer.
1470        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1471        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1472            i++;
1473        }
1474        if (i == mPendingRequestsList.end()) {
1475            // Verify all pending requests frame_numbers are greater
1476            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1477                    j != mPendingRequestsList.end(); j++) {
1478                if (j->frame_number < frame_number) {
1479                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1480                            __func__, j->frame_number, frame_number);
1481                }
1482            }
1483            camera3_capture_result_t result;
1484            result.result = NULL;
1485            result.frame_number = frame_number;
1486            result.num_output_buffers = 1;
1487            result.output_buffers = buffer;
1488            ALOGV("%s: result frame_number = %d, buffer = %p",
1489                    __func__, frame_number, buffer);
1490            mPendingBuffersMap.editValueFor(buffer->stream)--;
1491            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1492                int found = 0;
1493                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1494                      k != mStoredMetadataList.end(); k++) {
1495                    if (k->frame_number == frame_number) {
1496                        k->zsl_buf_hdl = buffer->buffer;
1497                        found = 1;
1498                        break;
1499                    }
1500                }
1501                if (!found) {
1502                   MetadataBufferInfo meta_info;
1503                   meta_info.frame_number = frame_number;
1504                   meta_info.zsl_buf_hdl = buffer->buffer;
1505                   mStoredMetadataList.push_back(meta_info);
1506                }
1507            }
1508            mCallbackOps->process_capture_result(mCallbackOps, &result);
1509        } else {
1510            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1511                    j != i->buffers.end(); j++) {
1512                if (j->stream == buffer->stream) {
1513                    if (j->buffer != NULL) {
1514                        ALOGE("%s: Error: buffer is already set", __func__);
1515                    } else {
1516                        j->buffer = (camera3_stream_buffer_t *)malloc(
1517                                sizeof(camera3_stream_buffer_t));
1518                        *(j->buffer) = *buffer;
1519                        ALOGV("%s: cache buffer %p at result frame_number %d",
1520                                __func__, buffer, frame_number);
1521                    }
1522                }
1523            }
1524        }
1525    }
1526    pthread_mutex_unlock(&mMutex);
1527    return;
1528}
1529
1530/*===========================================================================
1531 * FUNCTION   : translateCbMetadataToResultMetadata
1532 *
1533 * DESCRIPTION:
1534 *
1535 * PARAMETERS :
1536 *   @metadata : metadata information from callback
1537 *
1538 * RETURN     : camera_metadata_t*
1539 *              metadata in a format specified by fwk
1540 *==========================================================================*/
1541camera_metadata_t*
1542QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1543                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1544                                 int32_t request_id)
1545{
1546    CameraMetadata camMetadata;
1547    camera_metadata_t* resultMetadata;
1548
1549    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1550    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1551
1552    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1553    uint8_t next_entry;
1554    while (curr_entry != CAM_INTF_PARM_MAX) {
1555       ALOGV("%s: META_DEBUG: cur_entry is %d", __func__, curr_entry);
1556       switch (curr_entry) {
1557         case CAM_INTF_META_FACE_DETECTION:{
1558             cam_face_detection_data_t *faceDetectionInfo =
1559                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1560             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1561             int32_t faceIds[numFaces];
1562             uint8_t faceScores[numFaces];
1563             int32_t faceRectangles[numFaces * 4];
1564             int32_t faceLandmarks[numFaces * 6];
1565             int j = 0, k = 0;
1566             for (int i = 0; i < numFaces; i++) {
1567                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1568                 faceScores[i] = faceDetectionInfo->faces[i].score;
1569                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1570                         faceRectangles+j, -1);
1571                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1572                 j+= 4;
1573                 k+= 6;
1574             }
1575             if (numFaces > 0) {
1576                 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1577                 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1578                 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1579                     faceRectangles, numFaces*4);
1580                 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1581                     faceLandmarks, numFaces*6);
1582             }
1583            break;
1584            }
1585         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1586             uint8_t  *color_correct_mode =
1587                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1588             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1589             break;
1590          }
1591         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1592             int32_t  *ae_precapture_id =
1593                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1594             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1595             break;
1596          }
1597         case CAM_INTF_META_AEC_ROI: {
1598            cam_area_t  *hAeRegions =
1599                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1600             int32_t aeRegions[5];
1601             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1602             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1603             break;
1604          }
1605          case CAM_INTF_META_AEC_STATE:{
1606             uint8_t *ae_state =
1607                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1608             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1609             break;
1610          }
1611          case CAM_INTF_PARM_FOCUS_MODE:{
1612             uint8_t  *focusMode =
1613                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1614             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1615                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1616             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1617             break;
1618          }
1619          case CAM_INTF_META_AF_ROI:{
1620             /*af regions*/
1621             cam_area_t  *hAfRegions =
1622                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1623             int32_t afRegions[5];
1624             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1625             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1626             break;
1627          }
1628          case CAM_INTF_META_AF_STATE: {
1629             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1630             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1631             break;
1632          }
1633          case CAM_INTF_META_AF_TRIGGER_ID: {
1634             int32_t  *afTriggerId =
1635                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1636             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1637             break;
1638          }
1639          case CAM_INTF_PARM_WHITE_BALANCE: {
1640               uint8_t  *whiteBalance =
1641                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1642               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1643                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1644                   *whiteBalance);
1645               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1646               break;
1647          }
1648          case CAM_INTF_META_AWB_REGIONS: {
1649             /*awb regions*/
1650             cam_area_t  *hAwbRegions =
1651                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1652             int32_t awbRegions[5];
1653             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1654             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1655             break;
1656          }
1657          case CAM_INTF_META_AWB_STATE: {
1658             uint8_t  *whiteBalanceState =
1659                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1660             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1661             break;
1662          }
1663          case CAM_INTF_META_MODE: {
1664             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1665             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1666             break;
1667          }
1668          case CAM_INTF_META_EDGE_MODE: {
1669             uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1670             camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1671             break;
1672          }
1673          case CAM_INTF_META_FLASH_POWER: {
1674             uint8_t  *flashPower =
1675                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1676             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1677             break;
1678          }
1679          case CAM_INTF_META_FLASH_FIRING_TIME: {
1680             int64_t  *flashFiringTime =
1681                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1682             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1683             break;
1684          }
1685          case CAM_INTF_META_FLASH_STATE: {
1686             uint8_t  *flashState =
1687                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1688             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1689             break;
1690          }
1691          case CAM_INTF_META_FLASH_MODE:{
1692             uint8_t *flashMode = (uint8_t*)
1693                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
1694             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
1695             break;
1696          }
1697          case CAM_INTF_META_HOTPIXEL_MODE: {
1698              uint8_t  *hotPixelMode =
1699                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1700              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1701              break;
1702          }
1703          case CAM_INTF_META_LENS_APERTURE:{
1704             float  *lensAperture =
1705                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1706             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1707             break;
1708          }
1709          case CAM_INTF_META_LENS_FILTERDENSITY: {
1710             float  *filterDensity =
1711                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1712             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1713             break;
1714          }
1715          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1716             float  *focalLength =
1717                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1718             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1719             break;
1720          }
1721          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1722             float  *focusDistance =
1723                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1724             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1725             break;
1726          }
1727          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1728             float  *focusRange =
1729                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1730             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
1731          }
1732          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1733             uint8_t  *opticalStab =
1734                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1735             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1736          }
1737          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1738             uint8_t  *noiseRedMode =
1739                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1740             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1741             break;
1742          }
1743          case CAM_INTF_META_SCALER_CROP_REGION: {
1744             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1745             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1746             int32_t scalerCropRegion[4];
1747             scalerCropRegion[0] = hScalerCropRegion->left;
1748             scalerCropRegion[1] = hScalerCropRegion->top;
1749             scalerCropRegion[2] = hScalerCropRegion->width;
1750             scalerCropRegion[3] = hScalerCropRegion->height;
1751             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1752             break;
1753          }
1754          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1755             int64_t  *sensorExpTime =
1756                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1757             mMetadataResponse.exposure_time = *sensorExpTime;
1758             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1759             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1760             break;
1761          }
1762          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1763             int64_t  *sensorFameDuration =
1764                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1765             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1766             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1767             break;
1768          }
1769          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1770             int32_t  *sensorSensitivity =
1771                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1772             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1773             mMetadataResponse.iso_speed = *sensorSensitivity;
1774             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1775             break;
1776          }
1777          case CAM_INTF_META_SHADING_MODE: {
1778             uint8_t  *shadingMode =
1779                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1780             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1781             break;
1782          }
1783          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1784             uint8_t  *faceDetectMode =
1785                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1786             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1787                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1788                                                        *faceDetectMode);
1789             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1790             break;
1791          }
1792          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1793             uint8_t  *histogramMode =
1794                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1795             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1796             break;
1797          }
1798          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1799               uint8_t  *sharpnessMapMode =
1800                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1801               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1802                                  sharpnessMapMode, 1);
1803               break;
1804           }
1805          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1806               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1807               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1808               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1809                                  (int32_t*)sharpnessMap->sharpness,
1810                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1811               break;
1812          }
1813          case CAM_INTF_META_LENS_SHADING_MAP: {
1814               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1815               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1816               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1817               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1818               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1819                                  (float*)lensShadingMap->lens_shading,
1820                                  4*map_width*map_height);
1821               break;
1822          }
1823          case CAM_INTF_META_TONEMAP_CURVES:{
1824             //Populate CAM_INTF_META_TONEMAP_CURVES
1825             /* ch0 = G, ch 1 = B, ch 2 = R*/
1826             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1827             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1828             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1829                                (float*)tonemap->curves[0].tonemap_points,
1830                                tonemap->tonemap_points_cnt * 2);
1831
1832             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1833                                (float*)tonemap->curves[1].tonemap_points,
1834                                tonemap->tonemap_points_cnt * 2);
1835
1836             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1837                                (float*)tonemap->curves[2].tonemap_points,
1838                                tonemap->tonemap_points_cnt * 2);
1839             break;
1840          }
1841          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1842             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1843             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1844             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1845             break;
1846          }
1847          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1848              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1849              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1850              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1851                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1852              break;
1853          }
1854          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1855             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1856             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1857             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1858                       predColorCorrectionGains->gains, 4);
1859             break;
1860          }
1861          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1862             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1863                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1864             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1865                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1866             break;
1867
1868          }
1869          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
1870             uint8_t *blackLevelLock = (uint8_t*)
1871               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1872             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1873             break;
1874          }
1875          case CAM_INTF_META_SCENE_FLICKER:{
1876             uint8_t *sceneFlicker = (uint8_t*)
1877             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1878             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1879             break;
1880          }
1881          case CAM_INTF_PARM_LED_MODE:
1882             break;
1883          default:
1884             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
1885                   __func__, curr_entry);
1886             break;
1887       }
1888       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
1889       curr_entry = next_entry;
1890    }
1891    resultMetadata = camMetadata.release();
1892    return resultMetadata;
1893}
1894
1895/*===========================================================================
1896 * FUNCTION   : convertToRegions
1897 *
1898 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1899 *
1900 * PARAMETERS :
1901 *   @rect   : cam_rect_t struct to convert
1902 *   @region : int32_t destination array
1903 *   @weight : if we are converting from cam_area_t, weight is valid
1904 *             else weight = -1
1905 *
1906 *==========================================================================*/
1907void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1908    region[0] = rect.left;
1909    region[1] = rect.top;
1910    region[2] = rect.left + rect.width;
1911    region[3] = rect.top + rect.height;
1912    if (weight > -1) {
1913        region[4] = weight;
1914    }
1915}
1916
1917/*===========================================================================
1918 * FUNCTION   : convertFromRegions
1919 *
1920 * DESCRIPTION: helper method to convert from array to cam_rect_t
1921 *
1922 * PARAMETERS :
1923 *   @rect   : cam_rect_t struct to convert
1924 *   @region : int32_t destination array
1925 *   @weight : if we are converting from cam_area_t, weight is valid
1926 *             else weight = -1
1927 *
1928 *==========================================================================*/
1929void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1930                                                   const camera_metadata_t *settings,
1931                                                   uint32_t tag){
1932    CameraMetadata frame_settings;
1933    frame_settings = settings;
1934    int32_t x_min = frame_settings.find(tag).data.i32[0];
1935    int32_t y_min = frame_settings.find(tag).data.i32[1];
1936    int32_t x_max = frame_settings.find(tag).data.i32[2];
1937    int32_t y_max = frame_settings.find(tag).data.i32[3];
1938    roi->weight = frame_settings.find(tag).data.i32[4];
1939    roi->rect.left = x_min;
1940    roi->rect.top = y_min;
1941    roi->rect.width = x_max - x_min;
1942    roi->rect.height = y_max - y_min;
1943}
1944
1945/*===========================================================================
1946 * FUNCTION   : resetIfNeededROI
1947 *
1948 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1949 *              crop region
1950 *
1951 * PARAMETERS :
1952 *   @roi       : cam_area_t struct to resize
1953 *   @scalerCropRegion : cam_crop_region_t region to compare against
1954 *
1955 *
1956 *==========================================================================*/
1957bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1958                                                 const cam_crop_region_t* scalerCropRegion)
1959{
1960    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1961    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1962    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1963    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1964    if ((roi_x_max < scalerCropRegion->left) ||
1965        (roi_y_max < scalerCropRegion->top)  ||
1966        (roi->rect.left > crop_x_max) ||
1967        (roi->rect.top > crop_y_max)){
1968        return false;
1969    }
1970    if (roi->rect.left < scalerCropRegion->left) {
1971        roi->rect.left = scalerCropRegion->left;
1972    }
1973    if (roi->rect.top < scalerCropRegion->top) {
1974        roi->rect.top = scalerCropRegion->top;
1975    }
1976    if (roi_x_max > crop_x_max) {
1977        roi_x_max = crop_x_max;
1978    }
1979    if (roi_y_max > crop_y_max) {
1980        roi_y_max = crop_y_max;
1981    }
1982    roi->rect.width = roi_x_max - roi->rect.left;
1983    roi->rect.height = roi_y_max - roi->rect.top;
1984    return true;
1985}
1986
1987/*===========================================================================
1988 * FUNCTION   : convertLandmarks
1989 *
1990 * DESCRIPTION: helper method to extract the landmarks from face detection info
1991 *
1992 * PARAMETERS :
1993 *   @face   : cam_rect_t struct to convert
1994 *   @landmarks : int32_t destination array
1995 *
1996 *
1997 *==========================================================================*/
1998void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1999{
2000    landmarks[0] = face.left_eye_center.x;
2001    landmarks[1] = face.left_eye_center.y;
2002    landmarks[2] = face.right_eye_center.y;
2003    landmarks[3] = face.right_eye_center.y;
2004    landmarks[4] = face.mouth_center.x;
2005    landmarks[5] = face.mouth_center.y;
2006}
2007
2008#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2009/*===========================================================================
2010 * FUNCTION   : initCapabilities
2011 *
2012 * DESCRIPTION: initialize camera capabilities in static data struct
2013 *
2014 * PARAMETERS :
2015 *   @cameraId  : camera Id
2016 *
2017 * RETURN     : int32_t type of status
2018 *              NO_ERROR  -- success
2019 *              none-zero failure code
2020 *==========================================================================*/
2021int QCamera3HardwareInterface::initCapabilities(int cameraId)
2022{
2023    int rc = 0;
2024    mm_camera_vtbl_t *cameraHandle = NULL;
2025    QCamera3HeapMemory *capabilityHeap = NULL;
2026
2027    cameraHandle = camera_open(cameraId);
2028    if (!cameraHandle) {
2029        ALOGE("%s: camera_open failed", __func__);
2030        rc = -1;
2031        goto open_failed;
2032    }
2033
2034    capabilityHeap = new QCamera3HeapMemory();
2035    if (capabilityHeap == NULL) {
2036        ALOGE("%s: creation of capabilityHeap failed", __func__);
2037        goto heap_creation_failed;
2038    }
2039    /* Allocate memory for capability buffer */
2040    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2041    if(rc != OK) {
2042        ALOGE("%s: No memory for cappability", __func__);
2043        goto allocate_failed;
2044    }
2045
2046    /* Map memory for capability buffer */
2047    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2048    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2049                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2050                                capabilityHeap->getFd(0),
2051                                sizeof(cam_capability_t));
2052    if(rc < 0) {
2053        ALOGE("%s: failed to map capability buffer", __func__);
2054        goto map_failed;
2055    }
2056
2057    /* Query Capability */
2058    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2059    if(rc < 0) {
2060        ALOGE("%s: failed to query capability",__func__);
2061        goto query_failed;
2062    }
2063    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2064    if (!gCamCapability[cameraId]) {
2065        ALOGE("%s: out of memory", __func__);
2066        goto query_failed;
2067    }
2068    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2069                                        sizeof(cam_capability_t));
2070    rc = 0;
2071
2072query_failed:
2073    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2074                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2075map_failed:
2076    capabilityHeap->deallocate();
2077allocate_failed:
2078    delete capabilityHeap;
2079heap_creation_failed:
2080    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2081    cameraHandle = NULL;
2082open_failed:
2083    return rc;
2084}
2085
2086/*===========================================================================
2087 * FUNCTION   : initParameters
2088 *
2089 * DESCRIPTION: initialize camera parameters
2090 *
2091 * PARAMETERS :
2092 *
2093 * RETURN     : int32_t type of status
2094 *              NO_ERROR  -- success
2095 *              none-zero failure code
2096 *==========================================================================*/
2097int QCamera3HardwareInterface::initParameters()
2098{
2099    int rc = 0;
2100
2101    //Allocate Set Param Buffer
2102    mParamHeap = new QCamera3HeapMemory();
2103    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2104    if(rc != OK) {
2105        rc = NO_MEMORY;
2106        ALOGE("Failed to allocate SETPARM Heap memory");
2107        delete mParamHeap;
2108        mParamHeap = NULL;
2109        return rc;
2110    }
2111
2112    //Map memory for parameters buffer
2113    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2114            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2115            mParamHeap->getFd(0),
2116            sizeof(parm_buffer_t));
2117    if(rc < 0) {
2118        ALOGE("%s:failed to map SETPARM buffer",__func__);
2119        rc = FAILED_TRANSACTION;
2120        mParamHeap->deallocate();
2121        delete mParamHeap;
2122        mParamHeap = NULL;
2123        return rc;
2124    }
2125
2126    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2127    return rc;
2128}
2129
2130/*===========================================================================
2131 * FUNCTION   : deinitParameters
2132 *
2133 * DESCRIPTION: de-initialize camera parameters
2134 *
2135 * PARAMETERS :
2136 *
2137 * RETURN     : NONE
2138 *==========================================================================*/
2139void QCamera3HardwareInterface::deinitParameters()
2140{
2141    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2142            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2143
2144    mParamHeap->deallocate();
2145    delete mParamHeap;
2146    mParamHeap = NULL;
2147
2148    mParameters = NULL;
2149}
2150
2151/*===========================================================================
2152 * FUNCTION   : calcMaxJpegSize
2153 *
2154 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2155 *
2156 * PARAMETERS :
2157 *
2158 * RETURN     : max_jpeg_size
2159 *==========================================================================*/
2160int QCamera3HardwareInterface::calcMaxJpegSize()
2161{
2162    int32_t max_jpeg_size = 0;
2163    int temp_width, temp_height;
2164    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2165        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2166        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2167        if (temp_width * temp_height > max_jpeg_size ) {
2168            max_jpeg_size = temp_width * temp_height;
2169        }
2170    }
2171    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2172    return max_jpeg_size;
2173}
2174
2175/*===========================================================================
2176 * FUNCTION   : initStaticMetadata
2177 *
2178 * DESCRIPTION: initialize the static metadata
2179 *
2180 * PARAMETERS :
2181 *   @cameraId  : camera Id
2182 *
2183 * RETURN     : int32_t type of status
2184 *              0  -- success
2185 *              non-zero failure code
2186 *==========================================================================*/
2187int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2188{
2189    int rc = 0;
2190    CameraMetadata staticInfo;
2191
2192    /* android.info: hardware level */
2193    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2194    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2195        &supportedHardwareLevel, 1);
2196
2197    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2198    /*HAL 3 only*/
2199    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2200                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2201
2202    /*hard coded for now but this should come from sensor*/
2203    float min_focus_distance;
2204    if(facingBack){
2205        min_focus_distance = 10;
2206    } else {
2207        min_focus_distance = 0;
2208    }
2209    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2210                    &min_focus_distance, 1);
2211
2212    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2213                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2214
2215    /*should be using focal lengths but sensor doesn't provide that info now*/
2216    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2217                      &gCamCapability[cameraId]->focal_length,
2218                      1);
2219
2220    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2221                      gCamCapability[cameraId]->apertures,
2222                      gCamCapability[cameraId]->apertures_count);
2223
2224    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2225                gCamCapability[cameraId]->filter_densities,
2226                gCamCapability[cameraId]->filter_densities_count);
2227
2228
2229    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2230                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2231                      gCamCapability[cameraId]->optical_stab_modes_count);
2232
2233    staticInfo.update(ANDROID_LENS_POSITION,
2234                      gCamCapability[cameraId]->lens_position,
2235                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2236
2237    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2238                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2239    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2240                      lens_shading_map_size,
2241                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2242
2243    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2244                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2245    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2246            geo_correction_map_size,
2247            sizeof(geo_correction_map_size)/sizeof(int32_t));
2248
2249    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2250                       gCamCapability[cameraId]->geo_correction_map,
2251                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2252
2253    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2254            gCamCapability[cameraId]->sensor_physical_size, 2);
2255
2256    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2257            gCamCapability[cameraId]->exposure_time_range, 2);
2258
2259    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2260            &gCamCapability[cameraId]->max_frame_duration, 1);
2261
2262    camera_metadata_rational baseGainFactor = {
2263            gCamCapability[cameraId]->base_gain_factor.numerator,
2264            gCamCapability[cameraId]->base_gain_factor.denominator};
2265    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
2266                      &baseGainFactor, 1);
2267
2268    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2269                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2270
2271    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2272                                               gCamCapability[cameraId]->pixel_array_size.height};
2273    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2274                      pixel_array_size, 2);
2275
2276    int32_t active_array_size[] = {0, 0,
2277                                                gCamCapability[cameraId]->active_array_size.width,
2278                                                gCamCapability[cameraId]->active_array_size.height};
2279    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2280                      active_array_size, 4);
2281
2282    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2283            &gCamCapability[cameraId]->white_level, 1);
2284
2285    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2286            gCamCapability[cameraId]->black_level_pattern, 4);
2287
2288    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2289                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2290
2291    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2292                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2293
2294    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2295                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2296
2297    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2298                      &gCamCapability[cameraId]->histogram_size, 1);
2299
2300    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2301            &gCamCapability[cameraId]->max_histogram_count, 1);
2302
2303    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2304                                                gCamCapability[cameraId]->sharpness_map_size.height};
2305
2306    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2307            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2308
2309    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2310            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2311
2312
2313    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2314                      &gCamCapability[cameraId]->raw_min_duration,
2315                       1);
2316
2317    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2318                                                HAL_PIXEL_FORMAT_BLOB};
2319    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2320    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2321                      scalar_formats,
2322                      scalar_formats_count);
2323
2324    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2325    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2326              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2327              available_processed_sizes);
2328    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2329                available_processed_sizes,
2330                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2331
2332    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2333                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2334                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2335
2336    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2337    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2338                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2339                 available_fps_ranges);
2340    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2341            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2342
2343    camera_metadata_rational exposureCompensationStep = {
2344            gCamCapability[cameraId]->exp_compensation_step.numerator,
2345            gCamCapability[cameraId]->exp_compensation_step.denominator};
2346    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2347                      &exposureCompensationStep, 1);
2348
2349    /*TO DO*/
2350    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2351    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2352                      availableVstabModes, sizeof(availableVstabModes));
2353
2354    /*HAL 1 and HAL 3 common*/
2355    float maxZoom = 4;
2356    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2357            &maxZoom, 1);
2358
2359    int32_t max3aRegions = 1;
2360    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2361            &max3aRegions, 1);
2362
2363    uint8_t availableFaceDetectModes[] = {
2364            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2365            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2366    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2367                      availableFaceDetectModes,
2368                      sizeof(availableFaceDetectModes));
2369
2370    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2371                                                        gCamCapability[cameraId]->exposure_compensation_max};
2372    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2373            exposureCompensationRange,
2374            sizeof(exposureCompensationRange)/sizeof(int32_t));
2375
2376    uint8_t lensFacing = (facingBack) ?
2377            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2378    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2379
2380    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2381                available_processed_sizes,
2382                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2383
2384    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2385                      available_thumbnail_sizes,
2386                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2387
2388    int32_t max_jpeg_size = 0;
2389    int temp_width, temp_height;
2390    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2391        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2392        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2393        if (temp_width * temp_height > max_jpeg_size ) {
2394            max_jpeg_size = temp_width * temp_height;
2395        }
2396    }
2397    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2398    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2399                      &max_jpeg_size, 1);
2400
2401    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2402    int32_t size = 0;
2403    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2404        int val = lookupFwkName(EFFECT_MODES_MAP,
2405                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2406                                   gCamCapability[cameraId]->supported_effects[i]);
2407        if (val != NAME_NOT_FOUND) {
2408            avail_effects[size] = (uint8_t)val;
2409            size++;
2410        }
2411    }
2412    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2413                      avail_effects,
2414                      size);
2415
2416    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2417    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2418    int32_t supported_scene_modes_cnt = 0;
2419    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2420        int val = lookupFwkName(SCENE_MODES_MAP,
2421                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2422                                gCamCapability[cameraId]->supported_scene_modes[i]);
2423        if (val != NAME_NOT_FOUND) {
2424            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2425            supported_indexes[supported_scene_modes_cnt] = i;
2426            supported_scene_modes_cnt++;
2427        }
2428    }
2429
2430    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2431                      avail_scene_modes,
2432                      supported_scene_modes_cnt);
2433
2434    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2435    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2436                      supported_scene_modes_cnt,
2437                      scene_mode_overrides,
2438                      supported_indexes,
2439                      cameraId);
2440    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2441                      scene_mode_overrides,
2442                      supported_scene_modes_cnt*3);
2443
2444    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2445    size = 0;
2446    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2447        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2448                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2449                                 gCamCapability[cameraId]->supported_antibandings[i]);
2450        if (val != NAME_NOT_FOUND) {
2451            avail_antibanding_modes[size] = (uint8_t)val;
2452            size++;
2453        }
2454
2455    }
2456    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2457                      avail_antibanding_modes,
2458                      size);
2459
2460    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2461    size = 0;
2462    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2463        int val = lookupFwkName(FOCUS_MODES_MAP,
2464                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2465                                gCamCapability[cameraId]->supported_focus_modes[i]);
2466        if (val != NAME_NOT_FOUND) {
2467            avail_af_modes[size] = (uint8_t)val;
2468            size++;
2469        }
2470    }
2471    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2472                      avail_af_modes,
2473                      size);
2474
2475    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2476    size = 0;
2477    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2478        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2479                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2480                                    gCamCapability[cameraId]->supported_white_balances[i]);
2481        if (val != NAME_NOT_FOUND) {
2482            avail_awb_modes[size] = (uint8_t)val;
2483            size++;
2484        }
2485    }
2486    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2487                      avail_awb_modes,
2488                      size);
2489
2490    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2491    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2492      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2493
2494    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2495            available_flash_levels,
2496            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2497
2498
2499    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2500    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2501            &flashAvailable, 1);
2502
2503    uint8_t avail_ae_modes[5];
2504    size = 0;
2505    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2506        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2507        size++;
2508    }
2509    if (flashAvailable) {
2510        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2511        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2512        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2513    }
2514    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2515                      avail_ae_modes,
2516                      size);
2517
2518    int32_t sensitivity_range[2];
2519    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2520    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2521    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2522                      sensitivity_range,
2523                      sizeof(sensitivity_range) / sizeof(int32_t));
2524
2525    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2526                      &gCamCapability[cameraId]->max_analog_sensitivity,
2527                      1);
2528
2529    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2530                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2531                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2532
2533    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2534    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2535                      &sensor_orientation,
2536                      1);
2537
2538    int32_t max_output_streams[3] = {1, 3, 1};
2539    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2540                      max_output_streams,
2541                      3);
2542
2543    gStaticMetadata[cameraId] = staticInfo.release();
2544    return rc;
2545}
2546
2547/*===========================================================================
2548 * FUNCTION   : makeTable
2549 *
2550 * DESCRIPTION: make a table of sizes
2551 *
2552 * PARAMETERS :
2553 *
2554 *
2555 *==========================================================================*/
2556void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2557                                          int32_t* sizeTable)
2558{
2559    int j = 0;
2560    for (int i = 0; i < size; i++) {
2561        sizeTable[j] = dimTable[i].width;
2562        sizeTable[j+1] = dimTable[i].height;
2563        j+=2;
2564    }
2565}
2566
2567/*===========================================================================
2568 * FUNCTION   : makeFPSTable
2569 *
2570 * DESCRIPTION: make a table of fps ranges
2571 *
2572 * PARAMETERS :
2573 *
2574 *==========================================================================*/
2575void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2576                                          int32_t* fpsRangesTable)
2577{
2578    int j = 0;
2579    for (int i = 0; i < size; i++) {
2580        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2581        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2582        j+=2;
2583    }
2584}
2585
2586/*===========================================================================
2587 * FUNCTION   : makeOverridesList
2588 *
2589 * DESCRIPTION: make a list of scene mode overrides
2590 *
2591 * PARAMETERS :
2592 *
2593 *
2594 *==========================================================================*/
2595void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2596                                                  uint8_t size, uint8_t* overridesList,
2597                                                  uint8_t* supported_indexes,
2598                                                  int camera_id)
2599{
2600    /*daemon will give a list of overrides for all scene modes.
2601      However we should send the fwk only the overrides for the scene modes
2602      supported by the framework*/
2603    int j = 0, index = 0, supt = 0;
2604    uint8_t focus_override;
2605    for (int i = 0; i < size; i++) {
2606        supt = 0;
2607        index = supported_indexes[i];
2608        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2609        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2610                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2611                                                    overridesTable[index].awb_mode);
2612        focus_override = (uint8_t)overridesTable[index].af_mode;
2613        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2614           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2615              supt = 1;
2616              break;
2617           }
2618        }
2619        if (supt) {
2620           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2621                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2622                                              focus_override);
2623        } else {
2624           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2625        }
2626        j+=3;
2627    }
2628}
2629
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert a backend format to the type recognized by the framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
 *
 *==========================================================================*/
2640int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2641{
2642    int32_t halPixelFormat;
2643
2644    switch (format) {
2645    case CAM_FORMAT_YUV_420_NV12:
2646        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2647        break;
2648    case CAM_FORMAT_YUV_420_NV21:
2649        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2650        break;
2651    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2652        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2653        break;
2654    case CAM_FORMAT_YUV_420_YV12:
2655        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2656        break;
2657    case CAM_FORMAT_YUV_422_NV16:
2658    case CAM_FORMAT_YUV_422_NV61:
2659    default:
2660        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2661        break;
2662    }
2663    return halPixelFormat;
2664}
2665
/*===========================================================================
 * FUNCTION   : getSensorSensitivity
 *
 * DESCRIPTION: convert iso_mode to an integer value
 *
 * PARAMETERS : iso_mode : the iso_mode supported by sensor
 *
 * RETURN     : sensitivity supported by sensor
 *
 *==========================================================================*/
2676int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2677{
2678    int32_t sensitivity;
2679
2680    switch (iso_mode) {
2681    case CAM_ISO_MODE_100:
2682        sensitivity = 100;
2683        break;
2684    case CAM_ISO_MODE_200:
2685        sensitivity = 200;
2686        break;
2687    case CAM_ISO_MODE_400:
2688        sensitivity = 400;
2689        break;
2690    case CAM_ISO_MODE_800:
2691        sensitivity = 800;
2692        break;
2693    case CAM_ISO_MODE_1600:
2694        sensitivity = 1600;
2695        break;
2696    default:
2697        sensitivity = -1;
2698        break;
2699    }
2700    return sensitivity;
2701}
2702
2703
2704/*===========================================================================
2705 * FUNCTION   : AddSetParmEntryToBatch
2706 *
2707 * DESCRIPTION: add set parameter entry into batch
2708 *
2709 * PARAMETERS :
2710 *   @p_table     : ptr to parameter buffer
2711 *   @paramType   : parameter type
2712 *   @paramLength : length of parameter value
2713 *   @paramValue  : ptr to parameter value
2714 *
2715 * RETURN     : int32_t type of status
2716 *              NO_ERROR  -- success
2717 *              none-zero failure code
2718 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    /* The batch buffer keeps the set of touched parameters as a sorted,
     * intrusive singly-linked list keyed by the parameter ID itself
     * (position == paramType). The GET/SET_*_PARAM_ID macros read and
     * write the link fields stored inside p_table. */
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
        // Entry is already the list head; links are untouched and only the
        // payload copy below runs.
    } else if (position < current){
        // New smallest ID: splice in front and make it the new head.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        // Walk until 'current' is the last node whose successor ID is >= position.
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            // Standard sorted-list insert between 'current' and its successor.
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Reject payloads larger than one table slot; otherwise the memcpy
    // below would overrun into the neighboring entry.
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    // POINTER_OF resolves paramType to its payload slot inside p_table.
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2760
2761/*===========================================================================
2762 * FUNCTION   : lookupFwkName
2763 *
2764 * DESCRIPTION: In case the enum is not same in fwk and backend
2765 *              make sure the parameter is correctly propogated
2766 *
2767 * PARAMETERS  :
2768 *   @arr      : map between the two enums
2769 *   @len      : len of the map
2770 *   @hal_name : name of the hal_parm to map
2771 *
2772 * RETURN     : int type of status
2773 *              fwk_name  -- success
2774 *              none-zero failure code
2775 *==========================================================================*/
2776int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2777                                             int len, int hal_name)
2778{
2779
2780    for (int i = 0; i < len; i++) {
2781        if (arr[i].hal_name == hal_name)
2782            return arr[i].fwk_name;
2783    }
2784
2785    /* Not able to find matching framework type is not necessarily
2786     * an error case. This happens when mm-camera supports more attributes
2787     * than the frameworks do */
2788    ALOGD("%s: Cannot find matching framework type", __func__);
2789    return NAME_NOT_FOUND;
2790}
2791
2792/*===========================================================================
2793 * FUNCTION   : lookupHalName
2794 *
2795 * DESCRIPTION: In case the enum is not same in fwk and backend
2796 *              make sure the parameter is correctly propogated
2797 *
2798 * PARAMETERS  :
2799 *   @arr      : map between the two enums
2800 *   @len      : len of the map
 *   @fwk_name : name of the framework parameter to map
2802 *
2803 * RETURN     : int32_t type of status
2804 *              hal_name  -- success
2805 *              none-zero failure code
2806 *==========================================================================*/
2807int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2808                                             int len, int fwk_name)
2809{
2810    for (int i = 0; i < len; i++) {
2811       if (arr[i].fwk_name == fwk_name)
2812           return arr[i].hal_name;
2813    }
2814    ALOGE("%s: Cannot find matching hal type", __func__);
2815    return NAME_NOT_FOUND;
2816}
2817
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
2831int QCamera3HardwareInterface::getCamInfo(int cameraId,
2832                                    struct camera_info *info)
2833{
2834    int rc = 0;
2835
2836    if (NULL == gCamCapability[cameraId]) {
2837        rc = initCapabilities(cameraId);
2838        if (rc < 0) {
2839            //pthread_mutex_unlock(&g_camlock);
2840            return rc;
2841        }
2842    }
2843
2844    if (NULL == gStaticMetadata[cameraId]) {
2845        rc = initStaticMetadata(cameraId);
2846        if (rc < 0) {
2847            return rc;
2848        }
2849    }
2850
2851    switch(gCamCapability[cameraId]->position) {
2852    case CAM_POSITION_BACK:
2853        info->facing = CAMERA_FACING_BACK;
2854        break;
2855
2856    case CAM_POSITION_FRONT:
2857        info->facing = CAMERA_FACING_FRONT;
2858        break;
2859
2860    default:
2861        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2862        rc = -1;
2863        break;
2864    }
2865
2866
2867    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2868    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2869    info->static_camera_characteristics = gStaticMetadata[cameraId];
2870
2871    return rc;
2872}
2873
2874/*===========================================================================
2875 * FUNCTION   : translateMetadata
2876 *
2877 * DESCRIPTION: translate the metadata into camera_metadata_t
2878 *
2879 * PARAMETERS : type of the request
2880 *
2881 *
2882 * RETURN     : success: camera_metadata_t*
2883 *              failure: NULL
2884 *
2885 *==========================================================================*/
2886camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2887{
2888    pthread_mutex_lock(&mMutex);
2889
2890    if (mDefaultMetadata[type] != NULL) {
2891        pthread_mutex_unlock(&mMutex);
2892        return mDefaultMetadata[type];
2893    }
2894    //first time we are handling this request
2895    //fill up the metadata structure using the wrapper class
2896    CameraMetadata settings;
2897    //translate from cam_capability_t to camera_metadata_tag_t
2898    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2899    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2900    int32_t defaultRequestID = 0;
2901    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
2902
2903    /*control*/
2904
2905    uint8_t controlIntent = 0;
2906    switch (type) {
2907      case CAMERA3_TEMPLATE_PREVIEW:
2908        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2909        break;
2910      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2911        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2912        break;
2913      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2914        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2915        break;
2916      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2917        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2918        break;
2919      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2920        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2921        break;
2922      default:
2923        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2924        break;
2925    }
2926    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2927
2928    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2929            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2930
2931    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2932    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2933
2934    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2935    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2936
2937    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2938    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2939
2940    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2941    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2942
2943    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2944    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2945
2946    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2947    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2948
2949    static uint8_t focusMode;
2950    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2951        ALOGE("%s: Setting focus mode to auto", __func__);
2952        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2953    } else {
2954        ALOGE("%s: Setting focus mode to off", __func__);
2955        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2956    }
2957    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2958
2959    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2960    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2961
2962    /*flash*/
2963    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2964    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2965
2966    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2967    settings.update(ANDROID_FLASH_FIRING_POWER,
2968            &flashFiringLevel, 1);
2969
2970    /* lens */
2971    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2972    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2973
2974    if (gCamCapability[mCameraId]->filter_densities_count) {
2975        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2976        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2977                        gCamCapability[mCameraId]->filter_densities_count);
2978    }
2979
2980    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2981    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2982
2983    /* Exposure time(Update the Min Exposure Time)*/
2984    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
2985    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
2986
2987    /* frame duration */
2988    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2989    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2990
2991    /* sensitivity */
2992    static const int32_t default_sensitivity = 100;
2993    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2994
2995    /*edge mode*/
2996    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2997    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
2998
2999    /*noise reduction mode*/
3000    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3001    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3002
3003    /*color correction mode*/
3004    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3005    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3006
3007    /*transform matrix mode*/
3008    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3009    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3010
3011    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3012    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3013
3014    mDefaultMetadata[type] = settings.release();
3015
3016    pthread_mutex_unlock(&mMutex);
3017    return mDefaultMetadata[type];
3018}
3019
3020/*===========================================================================
3021 * FUNCTION   : setFrameParameters
3022 *
3023 * DESCRIPTION: set parameters per frame as requested in the metadata from
3024 *              framework
3025 *
3026 * PARAMETERS :
3027 *   @request   : request that needs to be serviced
3028 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3029 *
3030 * RETURN     : success: NO_ERROR
 3031 *              failure: BAD_VALUE (or the error returned by translation)
3032 *==========================================================================*/
3033int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3034                    uint32_t streamTypeMask)
3035{
3036    /*translate from camera_metadata_t type to parm_type_t*/
3037    int rc = 0;
3038    if (request->settings == NULL && mFirstRequest) {
3039        /*settings cannot be null for the first request*/
3040        return BAD_VALUE;
3041    }
3042
3043    int32_t hal_version = CAM_HAL_V3;
3044
3045    memset(mParameters, 0, sizeof(parm_buffer_t));
3046    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3047    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3048                sizeof(hal_version), &hal_version);
3049    if (rc < 0) {
3050        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3051        return BAD_VALUE;
3052    }
3053
3054    /*we need to update the frame number in the parameters*/
3055    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3056                                sizeof(request->frame_number), &(request->frame_number));
3057    if (rc < 0) {
3058        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3059        return BAD_VALUE;
3060    }
3061
3062    /* Update stream id mask where buffers are requested */
3063    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3064                                sizeof(streamTypeMask), &streamTypeMask);
3065    if (rc < 0) {
3066        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3067        return BAD_VALUE;
3068    }
3069
3070    if(request->settings != NULL){
3071        rc = translateMetadataToParameters(request);
3072    }
3073    /*set the parameters to backend*/
3074    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3075    return rc;
3076}
3077
3078/*===========================================================================
3079 * FUNCTION   : translateMetadataToParameters
3080 *
3081 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3082 *
3083 *
3084 * PARAMETERS :
3085 *   @request  : request sent from framework
3086 *
3087 *
3088 * RETURN     : success: NO_ERROR
 3089 *              failure: negative error code from AddSetParmEntryToBatch
3090 *==========================================================================*/
int QCamera3HardwareInterface::translateMetadataToParameters
                                  (const camera3_capture_request_t *request)
{
    // NOTE(review): rc is overwritten by every AddSetParmEntryToBatch call in
    // this function, so only the status of the *last* batch update attempted
    // is returned; intermediate failures are silently lost.
    int rc = 0;
    // Wrap the raw camera_metadata_t in the CameraMetadata helper (this makes
    // a copy of the framework settings buffer).
    CameraMetadata frame_settings;
    frame_settings = request->settings;

    /* Do not change the order of the following list unless you know what you are
     * doing.
     * The order is laid out in such a way that parameters in the front of the table
     * may be used to override the parameters later in the table. Examples are:
     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
     */
    // 3A master mode: drives the bestshot (scene) selection below.
    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
                sizeof(metaMode), &metaMode);
        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
           // NOTE(review): this call passes an *entry count*
           // (sizeof(MAP)/sizeof(MAP[0])) to lookupHalName, while most other
           // call sites below pass a bare sizeof(MAP) (byte count). Both
           // cannot match lookupHalName's length contract — confirm which is
           // correct and make the call sites consistent.
           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                             fwk_sceneMode);
           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                sizeof(sceneMode), &sceneMode);
        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                sizeof(sceneMode), &sceneMode);
        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                sizeof(sceneMode), &sceneMode);
        }
    }

    // AE mode: derives three backend parameters — AEC on/off, LED/flash mode,
    // and red-eye reduction.
    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
        uint8_t fwk_aeMode =
            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
        uint8_t aeMode;
        int32_t redeye;

        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
            aeMode = CAM_AE_MODE_OFF;
        } else {
            aeMode = CAM_AE_MODE_ON;
        }
        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
            redeye = 1;
        } else {
            redeye = 0;
        }

        // NOTE(review): bare sizeof(AE_FLASH_MODE_MAP) — byte count, not entry
        // count; see the SCENE_MODES_MAP note above.
        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
                                          sizeof(AE_FLASH_MODE_MAP),
                                          fwk_aeMode);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
                sizeof(aeMode), &aeMode);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
                sizeof(flashMode), &flashMode);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
                sizeof(redeye), &redeye);
    }

    // White balance mode.
    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
        uint8_t fwk_whiteLevel =
            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
                sizeof(WHITE_BALANCE_MODES_MAP),
                fwk_whiteLevel);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
                sizeof(whiteLevel), &whiteLevel);
    }

    // Focus distance is read before AF mode because an explicit distance of
    // 0.0 with AF off is treated as "focus at infinity" below.
    float focalDistance = -1.0;
    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_LENS_FOCUS_DISTANCE,
                sizeof(focalDistance), &focalDistance);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
        uint8_t fwk_focusMode =
            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
        uint8_t focusMode;
        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
            focusMode = CAM_FOCUS_MODE_INFINITY;
        } else{
         focusMode = lookupHalName(FOCUS_MODES_MAP,
                                   sizeof(FOCUS_MODES_MAP),
                                   fwk_focusMode);
        }
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
                sizeof(focusMode), &focusMode);
    }

    // NOTE(review): ANDROID_CONTROL_AE_ANTIBANDING_MODE is a byte (u8) enum in
    // the camera metadata tag definitions; reading data.i32 here looks like a
    // type mismatch — confirm against the framework tag type.
    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
        int32_t antibandingMode =
            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
                sizeof(antibandingMode), &antibandingMode);
    }

    // Exposure compensation, clamped to the sensor's advertised range.
    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
        int32_t expCompensation = frame_settings.find(
            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
          sizeof(expCompensation), &expCompensation);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
                sizeof(aeLock), &aeLock);
    }
    // Target FPS range: [min, max] pair from the framework.
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        cam_fps_range_t fps_range;
        fps_range.min_fps =
            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
        fps_range.max_fps =
            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
                sizeof(fps_range), &fps_range);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock =
            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
                sizeof(awbLock), &awbLock);
    }

    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode =
            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
                sizeof(EFFECT_MODES_MAP),
                fwk_effectMode);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
                sizeof(effectMode), &effectMode);
    }

    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode =
            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        rc =
            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
                    sizeof(colorCorrectMode), &colorCorrectMode);
    }

    // Color correction gains: 4 floats (one per Bayer channel).
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        cam_color_correct_gains_t colorCorrectGains;
        for (int i = 0; i < 4; i++) {
            colorCorrectGains.gains[i] =
                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        rc =
            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
                    sizeof(colorCorrectGains), &colorCorrectGains);
    }

    // Color correction transform: 3x3 matrix of rationals, flattened
    // row-major in the framework metadata.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        int num = 0;
        for (int i = 0; i < 3; i++) {
           for (int j = 0; j < 3; j++) {
              transform_elem.numerator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
              transform_elem.denominator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
              num++;
           }
        }
        rc =
            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                    sizeof(colorCorrectTransform), &colorCorrectTransform);
    }

    // Precapture trigger is always sent (IDLE with id -1 when the framework
    // supplied none), unlike the AF trigger below which is conditional.
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
    }
    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                                sizeof(aecTrigger), &aecTrigger);

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
    }

    // NOTE(review): a byte tag is read here but widened into an int32 local
    // and batched with sizeof(int32_t) — presumably CAM_INTF_META_DEMOSAIC
    // expects a 32-bit value; confirm against the backend parameter table.
    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        int32_t demosaic =
            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
                sizeof(demosaic), &demosaic);
    }

    // Edge (sharpness) mode; OFF forces sharpness 0, otherwise use the
    // requested strength or the capability default.
    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
                int32_t edgeStrength =
                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
                edge_application.sharpness = edgeStrength;
            } else {
                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
            }
        }
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
                sizeof(edge_application), &edge_application);
    }

    // android.flash.mode is honored only when AE mode does not already own
    // the flash (i.e. AE mode is OFF or ON, not one of the AUTO_FLASH modes).
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
                respectFlashMode = 0;
                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
                    __func__);
            }
        }
        if (respectFlashMode) {
            // NOTE(review): flashMode is a uint8_t assigned from an
            // (int32_t)-cast lookup and batched with sizeof == 1, while the
            // AE path above batches CAM_INTF_PARM_LED_MODE as a 4-byte
            // int32_t. One of the two widths must be wrong — confirm the
            // backend's expected size for CAM_INTF_PARM_LED_MODE.
            uint8_t flashMode =
                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
                                          sizeof(FLASH_MODES_MAP),
                                          flashMode);
            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
            // To check: CAM_INTF_META_FLASH_MODE usage
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
                          sizeof(flashMode), &flashMode);
        }
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower =
            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
                sizeof(flashPower), &flashPower);
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime =
            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
    }

    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
        uint8_t geometricMode =
            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
                sizeof(geometricMode), &geometricMode);
    }

    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
        uint8_t geometricStrength =
            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_GEOMETRIC_STRENGTH,
                sizeof(geometricStrength), &geometricStrength);
    }

    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode =
            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
                sizeof(hotPixelMode), &hotPixelMode);
    }

    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture =
            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
                sizeof(lensAperture), &lensAperture);
    }

    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity =
            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
                sizeof(filterDensity), &filterDensity);
    }

    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength =
            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_LENS_FOCAL_LENGTH,
                sizeof(focalLength), &focalLength);
    }

    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_LENS_OPT_STAB_MODE,
                sizeof(optStabMode), &optStabMode);
    }

    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode =
            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_NOISE_REDUCTION_MODE,
                sizeof(noiseRedMode), &noiseRedMode);
    }

    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
        uint8_t noiseRedStrength =
            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
                sizeof(noiseRedStrength), &noiseRedStrength);
    }

    // The crop region is remembered so the 3A ROIs below can be reset when
    // they fall outside it (see resetIfNeededROI).
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SCALER_CROP_REGION,
                sizeof(scalerCropRegion), &scalerCropRegion);
        scalerCropSet = true;
    }

    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sizeof(sensorExpTime), &sensorExpTime);
    }

    // Frame duration is clamped between the per-request minimum (depends on
    // the requested streams) and the sensor's maximum.
    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        int64_t minFrameDuration = getMinFrameDuration(request);
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_FRAME_DURATION,
                sizeof(sensorFrameDuration), &sensorFrameDuration);
    }

    // Sensitivity (ISO), clamped to the sensor's advertised range.
    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity =
            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity <
                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
            sensorSensitivity =
                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity >
                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
            sensorSensitivity =
                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_SENSITIVITY,
                sizeof(sensorSensitivity), &sensorSensitivity);
    }

    // NOTE(review): same byte-tag-widened-to-int32 pattern as DEMOSAIC above.
    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        int32_t shadingMode =
            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
                sizeof(shadingMode), &shadingMode);
    }

    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
        uint8_t shadingStrength =
            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
                sizeof(shadingStrength), &shadingStrength);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
        uint8_t facedetectMode =
            lookupHalName(FACEDETECT_MODES_MAP,
                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_FACEDETECT_MODE,
                sizeof(facedetectMode), &facedetectMode);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_HISTOGRAM_MODE,
                sizeof(histogramMode), &histogramMode);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sizeof(sharpnessMapMode), &sharpnessMapMode);
    }

    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_TONEMAP_MODE,
                sizeof(tonemapMode), &tonemapMode);
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    // Each framework curve is a flat array of (Pin, Pout) float pairs, hence
    // count/2 points and the inner j-loop of 2 below.
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;

        /* ch0 = G*/
        int point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
            for (int j = 0; j < 2; j++) {
               tonemapCurveGreen.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (int j = 0; j < 2; j++) {
               tonemapCurveBlue.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (int j = 0; j < 2; j++) {
               tonemapCurveRed.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_TONEMAP_CURVES,
                sizeof(tonemapCurves), &tonemapCurves);
    }

    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent =
            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
                sizeof(captureIntent), &captureIntent);
    }

    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock =
            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
                sizeof(blackLevelLock), &blackLevelLock);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                sizeof(lensShadingMapMode), &lensShadingMapMode);
    }

    // 3A regions (AE/AF/AWB): each ROI is converted from the framework
    // representation and sent only if it remains valid after being checked
    // against the scaler crop region (when one was set this request).
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
                    sizeof(roi), &roi);
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
                    sizeof(roi), &roi);
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
                    sizeof(roi), &roi);
        }
    }
    return rc;
}
3648
3649/*===========================================================================
3650 * FUNCTION   : getJpegSettings
3651 *
3652 * DESCRIPTION: save the jpeg settings in the HAL
3653 *
3654 *
3655 * PARAMETERS :
3656 *   @settings  : frame settings information from framework
3657 *
3658 *
3659 * RETURN     : success: NO_ERROR
3660 *              failure:
3661 *==========================================================================*/
3662int QCamera3HardwareInterface::getJpegSettings
3663                                  (const camera_metadata_t *settings)
3664{
3665    if (mJpegSettings) {
3666        if (mJpegSettings->gps_timestamp) {
3667            free(mJpegSettings->gps_timestamp);
3668            mJpegSettings->gps_timestamp = NULL;
3669        }
3670        if (mJpegSettings->gps_coordinates) {
3671            for (int i = 0; i < 3; i++) {
3672                free(mJpegSettings->gps_coordinates[i]);
3673                mJpegSettings->gps_coordinates[i] = NULL;
3674            }
3675        }
3676        free(mJpegSettings);
3677        mJpegSettings = NULL;
3678    }
3679    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3680    CameraMetadata jpeg_settings;
3681    jpeg_settings = settings;
3682
3683    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3684        mJpegSettings->jpeg_orientation =
3685            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3686    } else {
3687        mJpegSettings->jpeg_orientation = 0;
3688    }
3689    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3690        mJpegSettings->jpeg_quality =
3691            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3692    } else {
3693        mJpegSettings->jpeg_quality = 85;
3694    }
3695    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3696        mJpegSettings->thumbnail_size.width =
3697            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3698        mJpegSettings->thumbnail_size.height =
3699            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3700    } else {
3701        mJpegSettings->thumbnail_size.width = 0;
3702        mJpegSettings->thumbnail_size.height = 0;
3703    }
3704    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3705        for (int i = 0; i < 3; i++) {
3706            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3707            *(mJpegSettings->gps_coordinates[i]) =
3708                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3709        }
3710    } else{
3711       for (int i = 0; i < 3; i++) {
3712            mJpegSettings->gps_coordinates[i] = NULL;
3713        }
3714    }
3715
3716    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3717        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3718        *(mJpegSettings->gps_timestamp) =
3719            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3720    } else {
3721        mJpegSettings->gps_timestamp = NULL;
3722    }
3723
3724    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3725        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3726        for (int i = 0; i < len; i++) {
3727            mJpegSettings->gps_processing_method[i] =
3728                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3729        }
3730        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3731            mJpegSettings->gps_processing_method[len] = '\0';
3732        }
3733    } else {
3734        mJpegSettings->gps_processing_method[0] = '\0';
3735    }
3736
3737    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3738        mJpegSettings->sensor_sensitivity =
3739            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3740    } else {
3741        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3742    }
3743
3744    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3745
3746    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3747        mJpegSettings->lens_focal_length =
3748            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3749    }
3750    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3751        mJpegSettings->exposure_compensation =
3752            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3753    }
3754    mJpegSettings->sharpness = 10; //default value
3755    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3756        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3757        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3758            mJpegSettings->sharpness = 0;
3759        }
3760    }
3761    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3762    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3763    mJpegSettings->is_jpeg_format = true;
3764    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3765    return 0;
3766}
3767
3768/*===========================================================================
3769 * FUNCTION   : captureResultCb
3770 *
3771 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3772 *
3773 * PARAMETERS :
3774 *   @frame  : frame information from mm-camera-interface
3775 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3776 *   @userdata: userdata
3777 *
3778 * RETURN     : NONE
3779 *==========================================================================*/
3780void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3781                camera3_stream_buffer_t *buffer,
3782                uint32_t frame_number, void *userdata)
3783{
3784    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3785    if (hw == NULL) {
3786        ALOGE("%s: Invalid hw %p", __func__, hw);
3787        return;
3788    }
3789
3790    hw->captureResultCb(metadata, buffer, frame_number);
3791    return;
3792}
3793
3794
3795/*===========================================================================
3796 * FUNCTION   : initialize
3797 *
3798 * DESCRIPTION: Pass framework callback pointers to HAL
3799 *
3800 * PARAMETERS :
3801 *
3802 *
3803 * RETURN     : Success : 0
3804 *              Failure: -ENODEV
3805 *==========================================================================*/
3806
3807int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3808                                  const camera3_callback_ops_t *callback_ops)
3809{
3810    ALOGV("%s: E", __func__);
3811    QCamera3HardwareInterface *hw =
3812        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3813    if (!hw) {
3814        ALOGE("%s: NULL camera device", __func__);
3815        return -ENODEV;
3816    }
3817
3818    int rc = hw->initialize(callback_ops);
3819    ALOGV("%s: X", __func__);
3820    return rc;
3821}
3822
3823/*===========================================================================
3824 * FUNCTION   : configure_streams
3825 *
3826 * DESCRIPTION:
3827 *
3828 * PARAMETERS :
3829 *
3830 *
3831 * RETURN     : Success: 0
3832 *              Failure: -EINVAL (if stream configuration is invalid)
3833 *                       -ENODEV (fatal error)
3834 *==========================================================================*/
3835
3836int QCamera3HardwareInterface::configure_streams(
3837        const struct camera3_device *device,
3838        camera3_stream_configuration_t *stream_list)
3839{
3840    ALOGV("%s: E", __func__);
3841    QCamera3HardwareInterface *hw =
3842        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3843    if (!hw) {
3844        ALOGE("%s: NULL camera device", __func__);
3845        return -ENODEV;
3846    }
3847    int rc = hw->configureStreams(stream_list);
3848    ALOGV("%s: X", __func__);
3849    return rc;
3850}
3851
3852/*===========================================================================
3853 * FUNCTION   : register_stream_buffers
3854 *
3855 * DESCRIPTION: Register stream buffers with the device
3856 *
3857 * PARAMETERS :
3858 *
3859 * RETURN     :
3860 *==========================================================================*/
3861int QCamera3HardwareInterface::register_stream_buffers(
3862        const struct camera3_device *device,
3863        const camera3_stream_buffer_set_t *buffer_set)
3864{
3865    ALOGV("%s: E", __func__);
3866    QCamera3HardwareInterface *hw =
3867        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3868    if (!hw) {
3869        ALOGE("%s: NULL camera device", __func__);
3870        return -ENODEV;
3871    }
3872    int rc = hw->registerStreamBuffers(buffer_set);
3873    ALOGV("%s: X", __func__);
3874    return rc;
3875}
3876
3877/*===========================================================================
3878 * FUNCTION   : construct_default_request_settings
3879 *
3880 * DESCRIPTION: Configure a settings buffer to meet the required use case
3881 *
3882 * PARAMETERS :
3883 *
3884 *
3885 * RETURN     : Success: Return valid metadata
3886 *              Failure: Return NULL
3887 *==========================================================================*/
3888const camera_metadata_t* QCamera3HardwareInterface::
3889    construct_default_request_settings(const struct camera3_device *device,
3890                                        int type)
3891{
3892
3893    ALOGV("%s: E", __func__);
3894    camera_metadata_t* fwk_metadata = NULL;
3895    QCamera3HardwareInterface *hw =
3896        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3897    if (!hw) {
3898        ALOGE("%s: NULL camera device", __func__);
3899        return NULL;
3900    }
3901
3902    fwk_metadata = hw->translateCapabilityToMetadata(type);
3903
3904    ALOGV("%s: X", __func__);
3905    return fwk_metadata;
3906}
3907
3908/*===========================================================================
3909 * FUNCTION   : process_capture_request
3910 *
3911 * DESCRIPTION:
3912 *
3913 * PARAMETERS :
3914 *
3915 *
3916 * RETURN     :
3917 *==========================================================================*/
3918int QCamera3HardwareInterface::process_capture_request(
3919                    const struct camera3_device *device,
3920                    camera3_capture_request_t *request)
3921{
3922    ALOGV("%s: E", __func__);
3923    QCamera3HardwareInterface *hw =
3924        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3925    if (!hw) {
3926        ALOGE("%s: NULL camera device", __func__);
3927        return -EINVAL;
3928    }
3929
3930    int rc = hw->processCaptureRequest(request);
3931    ALOGV("%s: X", __func__);
3932    return rc;
3933}
3934
3935/*===========================================================================
3936 * FUNCTION   : get_metadata_vendor_tag_ops
3937 *
3938 * DESCRIPTION:
3939 *
3940 * PARAMETERS :
3941 *
3942 *
3943 * RETURN     :
3944 *==========================================================================*/
3945
3946void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3947                const struct camera3_device *device,
3948                vendor_tag_query_ops_t* ops)
3949{
3950    ALOGV("%s: E", __func__);
3951    QCamera3HardwareInterface *hw =
3952        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3953    if (!hw) {
3954        ALOGE("%s: NULL camera device", __func__);
3955        return;
3956    }
3957
3958    hw->getMetadataVendorTagOps(ops);
3959    ALOGV("%s: X", __func__);
3960    return;
3961}
3962
3963/*===========================================================================
3964 * FUNCTION   : dump
3965 *
3966 * DESCRIPTION:
3967 *
3968 * PARAMETERS :
3969 *
3970 *
3971 * RETURN     :
3972 *==========================================================================*/
3973
3974void QCamera3HardwareInterface::dump(
3975                const struct camera3_device *device, int fd)
3976{
3977    ALOGV("%s: E", __func__);
3978    QCamera3HardwareInterface *hw =
3979        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3980    if (!hw) {
3981        ALOGE("%s: NULL camera device", __func__);
3982        return;
3983    }
3984
3985    hw->dump(fd);
3986    ALOGV("%s: X", __func__);
3987    return;
3988}
3989
3990/*===========================================================================
3991 * FUNCTION   : flush
3992 *
3993 * DESCRIPTION:
3994 *
3995 * PARAMETERS :
3996 *
3997 *
3998 * RETURN     :
3999 *==========================================================================*/
4000
4001int QCamera3HardwareInterface::flush(
4002                const struct camera3_device *device)
4003{
4004    int rc;
4005    ALOGV("%s: E", __func__);
4006    QCamera3HardwareInterface *hw =
4007        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4008    if (!hw) {
4009        ALOGE("%s: NULL camera device", __func__);
4010        return -EINVAL;
4011    }
4012
4013    rc = hw->flush();
4014    ALOGV("%s: X", __func__);
4015    return rc;
4016}
4017
4018/*===========================================================================
4019 * FUNCTION   : close_camera_device
4020 *
4021 * DESCRIPTION:
4022 *
4023 * PARAMETERS :
4024 *
4025 *
4026 * RETURN     :
4027 *==========================================================================*/
4028int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4029{
4030    ALOGV("%s: E", __func__);
4031    int ret = NO_ERROR;
4032    QCamera3HardwareInterface *hw =
4033        reinterpret_cast<QCamera3HardwareInterface *>(
4034            reinterpret_cast<camera3_device_t *>(device)->priv);
4035    if (!hw) {
4036        ALOGE("NULL camera device");
4037        return BAD_VALUE;
4038    }
4039    delete hw;
4040
4041    pthread_mutex_lock(&mCameraSessionLock);
4042    mCameraSessionActive = 0;
4043    pthread_mutex_unlock(&mCameraSessionLock);
4044    ALOGV("%s: X", __func__);
4045    return ret;
4046}
4047
4048/*===========================================================================
4049 * FUNCTION   : getWaveletDenoiseProcessPlate
4050 *
4051 * DESCRIPTION: query wavelet denoise process plate
4052 *
4053 * PARAMETERS : None
4054 *
4055 * RETURN     : WNR prcocess plate vlaue
4056 *==========================================================================*/
4057cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4058{
4059    char prop[PROPERTY_VALUE_MAX];
4060    memset(prop, 0, sizeof(prop));
4061    property_get("persist.denoise.process.plates", prop, "0");
4062    int processPlate = atoi(prop);
4063    switch(processPlate) {
4064    case 0:
4065        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4066    case 1:
4067        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4068    case 2:
4069        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4070    case 3:
4071        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4072    default:
4073        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4074    }
4075}
4076
4077/*===========================================================================
4078 * FUNCTION   : needRotationReprocess
4079 *
4080 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4081 *
4082 * PARAMETERS : none
4083 *
4084 * RETURN     : true: needed
4085 *              false: no need
4086 *==========================================================================*/
4087bool QCamera3HardwareInterface::needRotationReprocess()
4088{
4089
4090    if (!mJpegSettings->is_jpeg_format) {
4091        // RAW image, no need to reprocess
4092        return false;
4093    }
4094
4095    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4096        mJpegSettings->jpeg_orientation > 0) {
4097        // current rotation is not zero, and pp has the capability to process rotation
4098        ALOGD("%s: need do reprocess for rotation", __func__);
4099        return true;
4100    }
4101
4102    return false;
4103}
4104
4105/*===========================================================================
4106 * FUNCTION   : needReprocess
4107 *
4108 * DESCRIPTION: if reprocess in needed
4109 *
4110 * PARAMETERS : none
4111 *
4112 * RETURN     : true: needed
4113 *              false: no need
4114 *==========================================================================*/
4115bool QCamera3HardwareInterface::needReprocess()
4116{
4117    if (!mJpegSettings->is_jpeg_format) {
4118        // RAW image, no need to reprocess
4119        return false;
4120    }
4121
4122    if ((mJpegSettings->min_required_pp_mask > 0) ||
4123         isWNREnabled()) {
4124        // TODO: add for ZSL HDR later
4125        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4126        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4127        return true;
4128    }
4129    return needRotationReprocess();
4130}
4131
4132/*===========================================================================
4133 * FUNCTION   : addOnlineReprocChannel
4134 *
4135 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4136 *              coming from input channel
4137 *
4138 * PARAMETERS :
4139 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4140 *
4141 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4142 *==========================================================================*/
4143QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4144              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4145{
4146    int32_t rc = NO_ERROR;
4147    QCamera3ReprocessChannel *pChannel = NULL;
4148    if (pInputChannel == NULL) {
4149        ALOGE("%s: input channel obj is NULL", __func__);
4150        return NULL;
4151    }
4152
4153    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4154            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4155    if (NULL == pChannel) {
4156        ALOGE("%s: no mem for reprocess channel", __func__);
4157        return NULL;
4158    }
4159
4160    // Capture channel, only need snapshot and postview streams start together
4161    mm_camera_channel_attr_t attr;
4162    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4163    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4164    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4165    rc = pChannel->initialize();
4166    if (rc != NO_ERROR) {
4167        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4168        delete pChannel;
4169        return NULL;
4170    }
4171
4172    // pp feature config
4173    cam_pp_feature_config_t pp_config;
4174    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4175    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4176        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4177        pp_config.sharpness = mJpegSettings->sharpness;
4178    }
4179
4180    if (isWNREnabled()) {
4181        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4182        pp_config.denoise2d.denoise_enable = 1;
4183        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4184    }
4185    if (needRotationReprocess()) {
4186        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4187        int rotation = mJpegSettings->jpeg_orientation;
4188        if (rotation == 0) {
4189            pp_config.rotation = ROTATE_0;
4190        } else if (rotation == 90) {
4191            pp_config.rotation = ROTATE_90;
4192        } else if (rotation == 180) {
4193            pp_config.rotation = ROTATE_180;
4194        } else if (rotation == 270) {
4195            pp_config.rotation = ROTATE_270;
4196        }
4197    }
4198
4199   rc = pChannel->addReprocStreamsFromSource(pp_config,
4200                                             pInputChannel,
4201                                             mMetadataChannel);
4202
4203    if (rc != NO_ERROR) {
4204        delete pChannel;
4205        return NULL;
4206    }
4207    return pChannel;
4208}
4209
4210int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
4211{
4212    return gCamCapability[mCameraId]->min_num_pp_bufs;
4213}
4214
4215bool QCamera3HardwareInterface::isWNREnabled() {
4216    return gCamCapability[mCameraId]->isWnrSupported;
4217}
4218
4219}; //end namespace qcamera
4220