QCamera3HWI.cpp revision ba7843eb48941909a360512af9d0596790d7d887
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand for fetching the mapped data pointer of buffer INDEX from a
// memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables; assumed to be populated before a
// QCamera3HardwareInterface is constructed (the ctor dereferences
// gCamCapability[cameraId] without a NULL check) -- TODO confirm who fills it.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Class-wide lock + flag enforcing a single active camera session at a time
// (checked in openCamera(hw_device)).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Framework ANDROID_CONTROL_EFFECT_MODE_* values -> HAL CAM_EFFECT_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
72
// Framework ANDROID_CONTROL_AWB_MODE_* values -> HAL CAM_WB_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
84
// Framework ANDROID_CONTROL_SCENE_MODE_* values -> HAL CAM_SCENE_MODE_*.
// Note: STEADYPHOTO intentionally maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
102
// Framework ANDROID_CONTROL_AF_MODE_* values -> HAL CAM_FOCUS_MODE_*.
// Note: AF_MODE_OFF is represented as fixed focus on the HAL side.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
111
// Framework ANDROID_CONTROL_AE_ANTIBANDING_MODE_* -> HAL CAM_ANTIBANDING_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
118
// Framework AE mode -> HAL flash mode. AE_MODE_ON (no flash requested) and
// AE_MODE_OFF both map to flash OFF; red-eye is treated as AUTO flash here.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
126
// Framework ANDROID_FLASH_MODE_* values -> HAL CAM_FLASH_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
132
// Framework face-detect modes -> HAL modes. SIMPLE mode is not listed, so it
// has no HAL mapping in this table.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
137
// Supported JPEG thumbnail sizes, flattened as (width, height) pairs; the
// trailing 0,0 pair denotes "no thumbnail" per the Android metadata convention.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
// Static camera3_device_ops vtable handed to the framework through
// camera3_device_t::ops. The HAL instance is reachable from these static
// entry points via camera3_device_t::priv (set in the constructor).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Populate the camera3_device_t the framework will call into; priv points
    // back at this instance so the static ops thunks can recover it.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced without a NULL
    // check -- assumes capabilities were loaded before construction; confirm.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built on demand; start with empty slots.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Best effort: power module is optional, failure is only logged.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if (channel)
240            delete channel;
241        free (*it);
242    }
243
244    mPictureChannel = NULL;
245
246    if (mJpegSettings != NULL) {
247        free(mJpegSettings);
248        mJpegSettings = NULL;
249    }
250
251    /* Clean up all channels */
252    if (mCameraInitialized) {
253        if (mMetadataChannel) {
254            mMetadataChannel->stop();
255            delete mMetadataChannel;
256            mMetadataChannel = NULL;
257        }
258        deinitParameters();
259    }
260
261    if (mCameraOpened)
262        closeCamera();
263
264    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
265        if (mDefaultMetadata[i])
266            free_camera_metadata(mDefaultMetadata[i]);
267
268    pthread_cond_destroy(&mRequestCond);
269
270    pthread_mutex_destroy(&mMutex);
271    ALOGV("%s: X", __func__);
272}
273
274/*===========================================================================
275 * FUNCTION   : openCamera
276 *
277 * DESCRIPTION: open camera
278 *
279 * PARAMETERS :
280 *   @hw_device  : double ptr for camera device struct
281 *
282 * RETURN     : int32_t type of status
283 *              NO_ERROR  -- success
284 *              none-zero failure code
285 *==========================================================================*/
286int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
287{
288    int rc = 0;
289    pthread_mutex_lock(&mCameraSessionLock);
290    if (mCameraSessionActive) {
291        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
292        pthread_mutex_unlock(&mCameraSessionLock);
293        return INVALID_OPERATION;
294    }
295
296    if (mCameraOpened) {
297        *hw_device = NULL;
298        return PERMISSION_DENIED;
299    }
300
301    rc = openCamera();
302    if (rc == 0) {
303        *hw_device = &mCameraDevice.common;
304        mCameraSessionActive = 1;
305    } else
306        *hw_device = NULL;
307
308#ifdef HAS_MULTIMEDIA_HINTS
309    if (rc == 0) {
310        if (m_pPowerModule) {
311            if (m_pPowerModule->powerHint) {
312                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
313                        (void *)"state=1");
314            }
315        }
316    }
317#endif
318    pthread_mutex_unlock(&mCameraSessionLock);
319    return rc;
320}
321
322/*===========================================================================
323 * FUNCTION   : openCamera
324 *
325 * DESCRIPTION: open camera
326 *
327 * PARAMETERS : none
328 *
329 * RETURN     : int32_t type of status
330 *              NO_ERROR  -- success
331 *              none-zero failure code
332 *==========================================================================*/
333int QCamera3HardwareInterface::openCamera()
334{
335    if (mCameraHandle) {
336        ALOGE("Failure: Camera already opened");
337        return ALREADY_EXISTS;
338    }
339    mCameraHandle = camera_open(mCameraId);
340    if (!mCameraHandle) {
341        ALOGE("camera_open failed.");
342        return UNKNOWN_ERROR;
343    }
344
345    mCameraOpened = true;
346
347    return NO_ERROR;
348}
349
350/*===========================================================================
351 * FUNCTION   : closeCamera
352 *
353 * DESCRIPTION: close camera
354 *
355 * PARAMETERS : none
356 *
357 * RETURN     : int32_t type of status
358 *              NO_ERROR  -- success
359 *              none-zero failure code
360 *==========================================================================*/
361int QCamera3HardwareInterface::closeCamera()
362{
363    int rc = NO_ERROR;
364
365    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
366    mCameraHandle = NULL;
367    mCameraOpened = false;
368
369#ifdef HAS_MULTIMEDIA_HINTS
370    if (rc == NO_ERROR) {
371        if (m_pPowerModule) {
372            if (m_pPowerModule->powerHint) {
373                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
374                        (void *)"state=0");
375            }
376        }
377    }
378#endif
379
380    return rc;
381}
382
383/*===========================================================================
384 * FUNCTION   : initialize
385 *
386 * DESCRIPTION: Initialize frameworks callback functions
387 *
388 * PARAMETERS :
389 *   @callback_ops : callback function to frameworks
390 *
391 * RETURN     :
392 *
393 *==========================================================================*/
394int QCamera3HardwareInterface::initialize(
395        const struct camera3_callback_ops *callback_ops)
396{
397    int rc;
398
399    pthread_mutex_lock(&mMutex);
400
401    rc = initParameters();
402    if (rc < 0) {
403        ALOGE("%s: initParamters failed %d", __func__, rc);
404       goto err1;
405    }
406    mCallbackOps = callback_ops;
407
408    pthread_mutex_unlock(&mMutex);
409    mCameraInitialized = true;
410    return 0;
411
412err1:
413    pthread_mutex_unlock(&mMutex);
414    return rc;
415}
416
417/*===========================================================================
418 * FUNCTION   : configureStreams
419 *
420 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
421 *              and output streams.
422 *
423 * PARAMETERS :
424 *   @stream_list : streams to be configured
425 *
426 * RETURN     :
427 *
428 *==========================================================================*/
429int QCamera3HardwareInterface::configureStreams(
430        camera3_stream_configuration_t *streamList)
431{
432    int rc = 0;
433    mIsZslMode = false;
434
435    // Sanity check stream_list
436    if (streamList == NULL) {
437        ALOGE("%s: NULL stream configuration", __func__);
438        return BAD_VALUE;
439    }
440    if (streamList->streams == NULL) {
441        ALOGE("%s: NULL stream list", __func__);
442        return BAD_VALUE;
443    }
444
445    if (streamList->num_streams < 1) {
446        ALOGE("%s: Bad number of streams requested: %d", __func__,
447                streamList->num_streams);
448        return BAD_VALUE;
449    }
450
451    /* first invalidate all the steams in the mStreamList
452     * if they appear again, they will be validated */
453    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
454            it != mStreamInfo.end(); it++) {
455        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
456        channel->stop();
457        (*it)->status = INVALID;
458    }
459    if (mMetadataChannel) {
460        /* If content of mStreamInfo is not 0, there is metadata stream */
461        mMetadataChannel->stop();
462    }
463
464    pthread_mutex_lock(&mMutex);
465
466    camera3_stream_t *inputStream = NULL;
467    camera3_stream_t *jpegStream = NULL;
468    cam_stream_size_info_t stream_config_info;
469
470    for (size_t i = 0; i < streamList->num_streams; i++) {
471        camera3_stream_t *newStream = streamList->streams[i];
472        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
473                __func__, newStream->stream_type, newStream->format,
474                 newStream->width, newStream->height);
475        //if the stream is in the mStreamList validate it
476        bool stream_exists = false;
477        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
478                it != mStreamInfo.end(); it++) {
479            if ((*it)->stream == newStream) {
480                QCamera3Channel *channel =
481                    (QCamera3Channel*)(*it)->stream->priv;
482                stream_exists = true;
483                (*it)->status = RECONFIGURE;
484                /*delete the channel object associated with the stream because
485                  we need to reconfigure*/
486                delete channel;
487                (*it)->stream->priv = NULL;
488            }
489        }
490        if (!stream_exists) {
491            //new stream
492            stream_info_t* stream_info;
493            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
494            stream_info->stream = newStream;
495            stream_info->status = VALID;
496            stream_info->registered = 0;
497            mStreamInfo.push_back(stream_info);
498        }
499        if (newStream->stream_type == CAMERA3_STREAM_INPUT
500                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
501            if (inputStream != NULL) {
502                ALOGE("%s: Multiple input streams requested!", __func__);
503                pthread_mutex_unlock(&mMutex);
504                return BAD_VALUE;
505            }
506            inputStream = newStream;
507        }
508        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
509            jpegStream = newStream;
510        }
511    }
512    mInputStream = inputStream;
513
514    /*clean up invalid streams*/
515    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
516            it != mStreamInfo.end();) {
517        if(((*it)->status) == INVALID){
518            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
519            delete channel;
520            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
521            free(*it);
522            it = mStreamInfo.erase(it);
523        } else {
524            it++;
525        }
526    }
527    if (mMetadataChannel) {
528        delete mMetadataChannel;
529        mMetadataChannel = NULL;
530    }
531
532    //Create metadata channel and initialize it
533    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
534                    mCameraHandle->ops, captureResultCb,
535                    &gCamCapability[mCameraId]->padding_info, this);
536    if (mMetadataChannel == NULL) {
537        ALOGE("%s: failed to allocate metadata channel", __func__);
538        rc = -ENOMEM;
539        pthread_mutex_unlock(&mMutex);
540        return rc;
541    }
542    rc = mMetadataChannel->initialize();
543    if (rc < 0) {
544        ALOGE("%s: metadata channel initialization failed", __func__);
545        delete mMetadataChannel;
546        pthread_mutex_unlock(&mMutex);
547        return rc;
548    }
549
550    /* Allocate channel objects for the requested streams */
551    for (size_t i = 0; i < streamList->num_streams; i++) {
552        camera3_stream_t *newStream = streamList->streams[i];
553        uint32_t stream_usage = newStream->usage;
554        stream_config_info.stream_sizes[i].width = newStream->width;
555        stream_config_info.stream_sizes[i].height = newStream->height;
556        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
557            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
558            //for zsl stream the size is jpeg size
559            stream_config_info.stream_sizes[i].width = jpegStream->width;
560            stream_config_info.stream_sizes[i].height = jpegStream->height;
561            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
562        } else {
563           //for non zsl streams find out the format
564           switch (newStream->format) {
565           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
566              {
567                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
568                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
569                 } else {
570                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
571                 }
572              }
573              break;
574           case HAL_PIXEL_FORMAT_YCbCr_420_888:
575              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
576              break;
577           case HAL_PIXEL_FORMAT_BLOB:
578              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
579              break;
580           default:
581              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
582              break;
583           }
584        }
585        if (newStream->priv == NULL) {
586            //New stream, construct channel
587            switch (newStream->stream_type) {
588            case CAMERA3_STREAM_INPUT:
589                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
590                break;
591            case CAMERA3_STREAM_BIDIRECTIONAL:
592                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
593                    GRALLOC_USAGE_HW_CAMERA_WRITE;
594                break;
595            case CAMERA3_STREAM_OUTPUT:
596                /* For video encoding stream, set read/write rarely
597                 * flag so that they may be set to un-cached */
598                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
599                    newStream->usage =
600                         (GRALLOC_USAGE_SW_READ_RARELY |
601                         GRALLOC_USAGE_SW_WRITE_RARELY |
602                         GRALLOC_USAGE_HW_CAMERA_WRITE);
603                else
604                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
605                break;
606            default:
607                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
608                break;
609            }
610
611            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
612                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
613                QCamera3Channel *channel;
614                switch (newStream->format) {
615                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
616                case HAL_PIXEL_FORMAT_YCbCr_420_888:
617                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
618                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
619                        jpegStream) {
620                        uint32_t width = jpegStream->width;
621                        uint32_t height = jpegStream->height;
622                        mIsZslMode = true;
623                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
624                            mCameraHandle->ops, captureResultCb,
625                            &gCamCapability[mCameraId]->padding_info, this, newStream,
626                            width, height);
627                    } else
628                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
629                            mCameraHandle->ops, captureResultCb,
630                            &gCamCapability[mCameraId]->padding_info, this, newStream);
631                    if (channel == NULL) {
632                        ALOGE("%s: allocation of channel failed", __func__);
633                        pthread_mutex_unlock(&mMutex);
634                        return -ENOMEM;
635                    }
636
637                    newStream->priv = channel;
638                    break;
639                case HAL_PIXEL_FORMAT_BLOB:
640                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
641                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
642                            mCameraHandle->ops, captureResultCb,
643                            &gCamCapability[mCameraId]->padding_info, this, newStream);
644                    if (mPictureChannel == NULL) {
645                        ALOGE("%s: allocation of channel failed", __func__);
646                        pthread_mutex_unlock(&mMutex);
647                        return -ENOMEM;
648                    }
649                    newStream->priv = (QCamera3Channel*)mPictureChannel;
650                    break;
651
652                //TODO: Add support for app consumed format?
653                default:
654                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
655                    break;
656                }
657            }
658        } else {
659            // Channel already exists for this stream
660            // Do nothing for now
661        }
662    }
663    /*For the streams to be reconfigured we need to register the buffers
664      since the framework wont*/
665    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
666            it != mStreamInfo.end(); it++) {
667        if ((*it)->status == RECONFIGURE) {
668            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
669            /*only register buffers for streams that have already been
670              registered*/
671            if ((*it)->registered) {
672                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
673                        (*it)->buffer_set.buffers);
674                if (rc != NO_ERROR) {
675                    ALOGE("%s: Failed to register the buffers of old stream,\
676                            rc = %d", __func__, rc);
677                }
678                ALOGV("%s: channel %p has %d buffers",
679                        __func__, channel, (*it)->buffer_set.num_buffers);
680            }
681        }
682
683        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
684        if (index == NAME_NOT_FOUND) {
685            mPendingBuffersMap.add((*it)->stream, 0);
686        } else {
687            mPendingBuffersMap.editValueAt(index) = 0;
688        }
689    }
690
691    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
692    mPendingRequestsList.clear();
693
694    /*flush the metadata list*/
695    if (!mStoredMetadataList.empty()) {
696        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
697              m != mStoredMetadataList.end(); m++) {
698            mMetadataChannel->bufDone(m->meta_buf);
699            free(m->meta_buf);
700            m = mStoredMetadataList.erase(m);
701        }
702    }
703    int32_t hal_version = CAM_HAL_V3;
704    stream_config_info.num_streams = streamList->num_streams;
705
706    //settings/parameters don't carry over for new configureStreams
707    memset(mParameters, 0, sizeof(parm_buffer_t));
708
709    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
710    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
711                sizeof(hal_version), &hal_version);
712
713    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
714                sizeof(stream_config_info), &stream_config_info);
715
716    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
717
718    mFirstRequest = true;
719
720    //Get min frame duration for this streams configuration
721    deriveMinFrameDuration();
722
723    pthread_mutex_unlock(&mMutex);
724    return rc;
725}
726
727/*===========================================================================
728 * FUNCTION   : validateCaptureRequest
729 *
730 * DESCRIPTION: validate a capture request from camera service
731 *
732 * PARAMETERS :
733 *   @request : request from framework to process
734 *
735 * RETURN     :
736 *
737 *==========================================================================*/
738int QCamera3HardwareInterface::validateCaptureRequest(
739                    camera3_capture_request_t *request)
740{
741    ssize_t idx = 0;
742    const camera3_stream_buffer_t *b;
743    CameraMetadata meta;
744
745    /* Sanity check the request */
746    if (request == NULL) {
747        ALOGE("%s: NULL capture request", __func__);
748        return BAD_VALUE;
749    }
750
751    uint32_t frameNumber = request->frame_number;
752    if (request->input_buffer != NULL &&
753            request->input_buffer->stream != mInputStream) {
754        ALOGE("%s: Request %d: Input buffer not from input stream!",
755                __FUNCTION__, frameNumber);
756        return BAD_VALUE;
757    }
758    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
759        ALOGE("%s: Request %d: No output buffers provided!",
760                __FUNCTION__, frameNumber);
761        return BAD_VALUE;
762    }
763    if (request->input_buffer != NULL) {
764        b = request->input_buffer;
765        QCamera3Channel *channel =
766            static_cast<QCamera3Channel*>(b->stream->priv);
767        if (channel == NULL) {
768            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
769                    __func__, frameNumber, idx);
770            return BAD_VALUE;
771        }
772        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
773            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
774                    __func__, frameNumber, idx);
775            return BAD_VALUE;
776        }
777        if (b->release_fence != -1) {
778            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
779                    __func__, frameNumber, idx);
780            return BAD_VALUE;
781        }
782        if (b->buffer == NULL) {
783            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
784                    __func__, frameNumber, idx);
785            return BAD_VALUE;
786        }
787    }
788
789    // Validate all buffers
790    b = request->output_buffers;
791    do {
792        QCamera3Channel *channel =
793                static_cast<QCamera3Channel*>(b->stream->priv);
794        if (channel == NULL) {
795            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
796                    __func__, frameNumber, idx);
797            return BAD_VALUE;
798        }
799        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
800            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
801                    __func__, frameNumber, idx);
802            return BAD_VALUE;
803        }
804        if (b->release_fence != -1) {
805            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
806                    __func__, frameNumber, idx);
807            return BAD_VALUE;
808        }
809        if (b->buffer == NULL) {
810            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
811                    __func__, frameNumber, idx);
812            return BAD_VALUE;
813        }
814        idx++;
815        b = request->output_buffers + idx;
816    } while (idx < (ssize_t)request->num_output_buffers);
817
818    return NO_ERROR;
819}
820
821/*===========================================================================
822 * FUNCTION   : deriveMinFrameDuration
823 *
824 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
825 *              on currently configured streams.
826 *
827 * PARAMETERS : NONE
828 *
829 * RETURN     : NONE
830 *
831 *==========================================================================*/
832void QCamera3HardwareInterface::deriveMinFrameDuration()
833{
834    int32_t maxJpegDimension, maxProcessedDimension;
835
836    maxJpegDimension = 0;
837    maxProcessedDimension = 0;
838
839    // Figure out maximum jpeg, processed, and raw dimensions
840    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
841        it != mStreamInfo.end(); it++) {
842
843        // Input stream doesn't have valid stream_type
844        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
845            continue;
846
847        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
848        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
849            if (dimension > maxJpegDimension)
850                maxJpegDimension = dimension;
851        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
852            if (dimension > maxProcessedDimension)
853                maxProcessedDimension = dimension;
854        }
855    }
856
857    //Assume all jpeg dimensions are in processed dimensions.
858    if (maxJpegDimension > maxProcessedDimension)
859        maxProcessedDimension = maxJpegDimension;
860
861    //Find minimum durations for processed, jpeg, and raw
862    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
863    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
864        if (maxProcessedDimension ==
865            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
866            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
867            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
868            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
869            break;
870        }
871    }
872}
873
874/*===========================================================================
875 * FUNCTION   : getMinFrameDuration
876 *
877 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
878 *              and current request configuration.
879 *
880 * PARAMETERS : @request: requset sent by the frameworks
881 *
882 * RETURN     : min farme duration for a particular request
883 *
884 *==========================================================================*/
885int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
886{
887    bool hasJpegStream = false;
888    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
889        const camera3_stream_t *stream = request->output_buffers[i].stream;
890        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
891            hasJpegStream = true;
892    }
893
894    if (!hasJpegStream)
895        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
896    else
897        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
898}
899
900/*===========================================================================
901 * FUNCTION   : registerStreamBuffers
902 *
903 * DESCRIPTION: Register buffers for a given stream with the HAL device.
904 *
905 * PARAMETERS :
906 *   @stream_list : streams to be configured
907 *
908 * RETURN     :
909 *
910 *==========================================================================*/
911int QCamera3HardwareInterface::registerStreamBuffers(
912        const camera3_stream_buffer_set_t *buffer_set)
913{
914    int rc = 0;
915
916    pthread_mutex_lock(&mMutex);
917
918    if (buffer_set == NULL) {
919        ALOGE("%s: Invalid buffer_set parameter.", __func__);
920        pthread_mutex_unlock(&mMutex);
921        return -EINVAL;
922    }
923    if (buffer_set->stream == NULL) {
924        ALOGE("%s: Invalid stream parameter.", __func__);
925        pthread_mutex_unlock(&mMutex);
926        return -EINVAL;
927    }
928    if (buffer_set->num_buffers < 1) {
929        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
930        pthread_mutex_unlock(&mMutex);
931        return -EINVAL;
932    }
933    if (buffer_set->buffers == NULL) {
934        ALOGE("%s: Invalid buffers parameter.", __func__);
935        pthread_mutex_unlock(&mMutex);
936        return -EINVAL;
937    }
938
939    camera3_stream_t *stream = buffer_set->stream;
940    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
941
942    //set the buffer_set in the mStreamInfo array
943    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
944            it != mStreamInfo.end(); it++) {
945        if ((*it)->stream == stream) {
946            uint32_t numBuffers = buffer_set->num_buffers;
947            (*it)->buffer_set.stream = buffer_set->stream;
948            (*it)->buffer_set.num_buffers = numBuffers;
949            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
950            if ((*it)->buffer_set.buffers == NULL) {
951                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
952                pthread_mutex_unlock(&mMutex);
953                return -ENOMEM;
954            }
955            for (size_t j = 0; j < numBuffers; j++){
956                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
957            }
958            (*it)->registered = 1;
959        }
960    }
961    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
962    if (rc < 0) {
963        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
964        pthread_mutex_unlock(&mMutex);
965        return -ENODEV;
966    }
967
968    pthread_mutex_unlock(&mMutex);
969    return NO_ERROR;
970}
971
972/*===========================================================================
973 * FUNCTION   : processCaptureRequest
974 *
975 * DESCRIPTION: process a capture request from camera service
976 *
977 * PARAMETERS :
978 *   @request : request from framework to process
979 *
980 * RETURN     :
981 *
982 *==========================================================================*/
983int QCamera3HardwareInterface::processCaptureRequest(
984                    camera3_capture_request_t *request)
985{
986    int rc = NO_ERROR;
987    int32_t request_id;
988    CameraMetadata meta;
989    MetadataBufferInfo reproc_meta;
990    int queueMetadata = 0;
991
992    pthread_mutex_lock(&mMutex);
993
994    rc = validateCaptureRequest(request);
995    if (rc != NO_ERROR) {
996        ALOGE("%s: incoming request is not valid", __func__);
997        pthread_mutex_unlock(&mMutex);
998        return rc;
999    }
1000
1001    meta = request->settings;
1002
1003    // For first capture request, send capture intent, and
1004    // stream on all streams
1005    if (mFirstRequest) {
1006
1007        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1008            int32_t hal_version = CAM_HAL_V3;
1009            uint8_t captureIntent =
1010                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1011
1012            memset(mParameters, 0, sizeof(parm_buffer_t));
1013            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1014            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1015                sizeof(hal_version), &hal_version);
1016            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1017                sizeof(captureIntent), &captureIntent);
1018            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1019                mParameters);
1020        }
1021
1022        mMetadataChannel->start();
1023        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1024            it != mStreamInfo.end(); it++) {
1025            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1026            channel->start();
1027        }
1028    }
1029
1030    uint32_t frameNumber = request->frame_number;
1031    uint32_t streamTypeMask = 0;
1032
1033    if (meta.exists(ANDROID_REQUEST_ID)) {
1034        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1035        mCurrentRequestId = request_id;
1036        ALOGV("%s: Received request with id: %d",__func__, request_id);
1037    } else if (mFirstRequest || mCurrentRequestId == -1){
1038        ALOGE("%s: Unable to find request id field, \
1039                & no previous id available", __func__);
1040        return NAME_NOT_FOUND;
1041    } else {
1042        ALOGV("%s: Re-using old request id", __func__);
1043        request_id = mCurrentRequestId;
1044    }
1045
1046    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1047                                    __func__, __LINE__,
1048                                    request->num_output_buffers,
1049                                    request->input_buffer,
1050                                    frameNumber);
1051    // Acquire all request buffers first
1052    int blob_request = 0;
1053    for (size_t i = 0; i < request->num_output_buffers; i++) {
1054        const camera3_stream_buffer_t& output = request->output_buffers[i];
1055        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1056        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1057
1058        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1059        //Call function to store local copy of jpeg data for encode params.
1060            blob_request = 1;
1061            rc = getJpegSettings(request->settings);
1062            if (rc < 0) {
1063                ALOGE("%s: failed to get jpeg parameters", __func__);
1064                pthread_mutex_unlock(&mMutex);
1065                return rc;
1066            }
1067        }
1068
1069        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1070        if (rc != OK) {
1071            ALOGE("%s: fence wait failed %d", __func__, rc);
1072            pthread_mutex_unlock(&mMutex);
1073            return rc;
1074        }
1075        streamTypeMask |= channel->getStreamTypeMask();
1076    }
1077
1078    rc = setFrameParameters(request, streamTypeMask);
1079    if (rc < 0) {
1080        ALOGE("%s: fail to set frame parameters", __func__);
1081        pthread_mutex_unlock(&mMutex);
1082        return rc;
1083    }
1084
1085    /* Update pending request list and pending buffers map */
1086    PendingRequestInfo pendingRequest;
1087    pendingRequest.frame_number = frameNumber;
1088    pendingRequest.num_buffers = request->num_output_buffers;
1089    pendingRequest.request_id = request_id;
1090    pendingRequest.blob_request = blob_request;
1091    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1092
1093    for (size_t i = 0; i < request->num_output_buffers; i++) {
1094        RequestedBufferInfo requestedBuf;
1095        requestedBuf.stream = request->output_buffers[i].stream;
1096        requestedBuf.buffer = NULL;
1097        pendingRequest.buffers.push_back(requestedBuf);
1098
1099        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1100    }
1101    mPendingRequestsList.push_back(pendingRequest);
1102
1103    // Notify metadata channel we receive a request
1104    mMetadataChannel->request(NULL, frameNumber);
1105
1106    // Call request on other streams
1107    for (size_t i = 0; i < request->num_output_buffers; i++) {
1108        const camera3_stream_buffer_t& output = request->output_buffers[i];
1109        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1110        mm_camera_buf_def_t *pInputBuffer = NULL;
1111
1112        if (channel == NULL) {
1113            ALOGE("%s: invalid channel pointer for stream", __func__);
1114            continue;
1115        }
1116
1117        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1118            QCamera3RegularChannel* inputChannel = NULL;
1119            if(request->input_buffer != NULL){
1120                //Try to get the internal format
1121                inputChannel = (QCamera3RegularChannel*)
1122                    request->input_buffer->stream->priv;
1123                if(inputChannel == NULL ){
1124                    ALOGE("%s: failed to get input channel handle", __func__);
1125                } else {
1126                    pInputBuffer =
1127                        inputChannel->getInternalFormatBuffer(
1128                                request->input_buffer->buffer);
1129                    ALOGD("%s: Input buffer dump",__func__);
1130                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1131                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1132                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1133                    ALOGD("Handle:%p", request->input_buffer->buffer);
1134                    //TODO: need to get corresponding metadata and send it to pproc
1135                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1136                         m != mStoredMetadataList.end(); m++) {
1137                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1138                            reproc_meta.meta_buf = m->meta_buf;
1139                            queueMetadata = 1;
1140                            break;
1141                        }
1142                    }
1143                }
1144            }
1145            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1146                            pInputBuffer,(QCamera3Channel*)inputChannel);
1147            if (queueMetadata) {
1148                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1149            }
1150        } else {
1151            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1152                __LINE__, output.buffer, frameNumber);
1153            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1154                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1155                     m != mStoredMetadataList.end(); m++) {
1156                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1157                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1158                            mMetadataChannel->bufDone(m->meta_buf);
1159                            free(m->meta_buf);
1160                            m = mStoredMetadataList.erase(m);
1161                            break;
1162                        }
1163                   }
1164                }
1165            }
1166            rc = channel->request(output.buffer, frameNumber);
1167        }
1168        if (rc < 0)
1169            ALOGE("%s: request failed", __func__);
1170    }
1171
1172    mFirstRequest = false;
1173    // Added a timed condition wait
1174    struct timespec ts;
1175    uint8_t isValidTimeout = 1;
1176    rc = clock_gettime(CLOCK_REALTIME, &ts);
1177    if (rc < 0) {
1178        isValidTimeout = 0;
1179        ALOGE("%s: Error reading the real time clock!!", __func__);
1180    }
1181    else {
1182        // Make timeout as 5 sec for request to be honored
1183        ts.tv_sec += 5;
1184    }
1185    //Block on conditional variable
1186    mPendingRequest = 1;
1187    while (mPendingRequest == 1) {
1188        if (!isValidTimeout) {
1189            ALOGV("%s: Blocking on conditional wait", __func__);
1190            pthread_cond_wait(&mRequestCond, &mMutex);
1191        }
1192        else {
1193            ALOGV("%s: Blocking on timed conditional wait", __func__);
1194            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1195            if (rc == ETIMEDOUT) {
1196                rc = -ENODEV;
1197                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1198                break;
1199            }
1200        }
1201        ALOGV("%s: Unblocked", __func__);
1202    }
1203
1204    pthread_mutex_unlock(&mMutex);
1205    return rc;
1206}
1207
1208/*===========================================================================
1209 * FUNCTION   : getMetadataVendorTagOps
1210 *
1211 * DESCRIPTION:
1212 *
1213 * PARAMETERS :
1214 *
1215 *
1216 * RETURN     :
1217 *==========================================================================*/
1218void QCamera3HardwareInterface::getMetadataVendorTagOps(
1219                    vendor_tag_query_ops_t* /*ops*/)
1220{
1221    /* Enable locks when we eventually add Vendor Tags */
1222    /*
1223    pthread_mutex_lock(&mMutex);
1224
1225    pthread_mutex_unlock(&mMutex);
1226    */
1227    return;
1228}
1229
1230/*===========================================================================
1231 * FUNCTION   : dump
1232 *
1233 * DESCRIPTION:
1234 *
1235 * PARAMETERS :
1236 *
1237 *
1238 * RETURN     :
1239 *==========================================================================*/
1240void QCamera3HardwareInterface::dump(int /*fd*/)
1241{
1242    /*Enable lock when we implement this function*/
1243    /*
1244    pthread_mutex_lock(&mMutex);
1245
1246    pthread_mutex_unlock(&mMutex);
1247    */
1248    return;
1249}
1250
1251/*===========================================================================
1252 * FUNCTION   : flush
1253 *
1254 * DESCRIPTION:
1255 *
1256 * PARAMETERS :
1257 *
1258 *
1259 * RETURN     :
1260 *==========================================================================*/
1261int QCamera3HardwareInterface::flush()
1262{
1263    /*Enable lock when we implement this function*/
1264    /*
1265    pthread_mutex_lock(&mMutex);
1266
1267    pthread_mutex_unlock(&mMutex);
1268    */
1269    return 0;
1270}
1271
1272/*===========================================================================
1273 * FUNCTION   : captureResultCb
1274 *
1275 * DESCRIPTION: Callback handler for all capture result
1276 *              (streams, as well as metadata)
1277 *
1278 * PARAMETERS :
1279 *   @metadata : metadata information
1280 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1281 *               NULL if metadata.
1282 *
1283 * RETURN     : NONE
1284 *==========================================================================*/
1285void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1286                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1287{
1288    pthread_mutex_lock(&mMutex);
1289
1290    if (metadata_buf) {
1291        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1292        int32_t frame_number_valid = *(int32_t *)
1293            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1294        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1295            CAM_INTF_META_PENDING_REQUESTS, metadata);
1296        uint32_t frame_number = *(uint32_t *)
1297            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1298        const struct timeval *tv = (const struct timeval *)
1299            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1300        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1301            tv->tv_usec * NSEC_PER_USEC;
1302
1303        if (!frame_number_valid) {
1304            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1305            mMetadataChannel->bufDone(metadata_buf);
1306            goto done_metadata;
1307        }
1308        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1309                frame_number, capture_time);
1310
1311        // Go through the pending requests info and send shutter/results to frameworks
1312        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1313                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1314            camera3_capture_result_t result;
1315            camera3_notify_msg_t notify_msg;
1316            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1317
1318            // Flush out all entries with less or equal frame numbers.
1319
1320            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1321            //Right now it's the same as metadata timestamp
1322
1323            //TODO: When there is metadata drop, how do we derive the timestamp of
1324            //dropped frames? For now, we fake the dropped timestamp by substracting
1325            //from the reported timestamp
1326            nsecs_t current_capture_time = capture_time -
1327                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1328
1329            // Send shutter notify to frameworks
1330            notify_msg.type = CAMERA3_MSG_SHUTTER;
1331            notify_msg.message.shutter.frame_number = i->frame_number;
1332            notify_msg.message.shutter.timestamp = current_capture_time;
1333            mCallbackOps->notify(mCallbackOps, &notify_msg);
1334            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1335                    i->frame_number, capture_time);
1336
1337            // Send empty metadata with already filled buffers for dropped metadata
1338            // and send valid metadata with already filled buffers for current metadata
1339            if (i->frame_number < frame_number) {
1340                CameraMetadata dummyMetadata;
1341                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1342                        &current_capture_time, 1);
1343                dummyMetadata.update(ANDROID_REQUEST_ID,
1344                        &(i->request_id), 1);
1345                result.result = dummyMetadata.release();
1346            } else {
1347                result.result = translateCbMetadataToResultMetadata(metadata,
1348                        current_capture_time, i->request_id);
1349                if (mIsZslMode) {
1350                   int found_metadata = 0;
1351                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1352                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1353                        j != i->buffers.end(); j++) {
1354                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1355                         //check if corresp. zsl already exists in the stored metadata list
1356                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1357                               m != mStoredMetadataList.begin(); m++) {
1358                            if (m->frame_number == frame_number) {
1359                               m->meta_buf = metadata_buf;
1360                               found_metadata = 1;
1361                               break;
1362                            }
1363                         }
1364                         if (!found_metadata) {
1365                            MetadataBufferInfo store_meta_info;
1366                            store_meta_info.meta_buf = metadata_buf;
1367                            store_meta_info.frame_number = frame_number;
1368                            mStoredMetadataList.push_back(store_meta_info);
1369                            found_metadata = 1;
1370                         }
1371                      }
1372                   }
1373                   if (!found_metadata) {
1374                       if (!i->input_buffer_present && i->blob_request) {
1375                          //livesnapshot or fallback non-zsl snapshot case
1376                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1377                                j != i->buffers.end(); j++){
1378                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1379                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1380                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1381                                 break;
1382                              }
1383                         }
1384                       } else {
1385                            //return the metadata immediately
1386                            mMetadataChannel->bufDone(metadata_buf);
1387                            free(metadata_buf);
1388                       }
1389                   }
1390               } else if (!mIsZslMode && i->blob_request) {
1391                   //If it is a blob request then send the metadata to the picture channel
1392                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1393               } else {
1394                   // Return metadata buffer
1395                   mMetadataChannel->bufDone(metadata_buf);
1396                   free(metadata_buf);
1397               }
1398
1399            }
1400            if (!result.result) {
1401                ALOGE("%s: metadata is NULL", __func__);
1402            }
1403            result.frame_number = i->frame_number;
1404            result.num_output_buffers = 0;
1405            result.output_buffers = NULL;
1406            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1407                    j != i->buffers.end(); j++) {
1408                if (j->buffer) {
1409                    result.num_output_buffers++;
1410                }
1411            }
1412
1413            if (result.num_output_buffers > 0) {
1414                camera3_stream_buffer_t *result_buffers =
1415                    new camera3_stream_buffer_t[result.num_output_buffers];
1416                if (!result_buffers) {
1417                    ALOGE("%s: Fatal error: out of memory", __func__);
1418                }
1419                size_t result_buffers_idx = 0;
1420                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1421                        j != i->buffers.end(); j++) {
1422                    if (j->buffer) {
1423                        result_buffers[result_buffers_idx++] = *(j->buffer);
1424                        free(j->buffer);
1425                        j->buffer = NULL;
1426                        mPendingBuffersMap.editValueFor(j->stream)--;
1427                    }
1428                }
1429                result.output_buffers = result_buffers;
1430
1431                mCallbackOps->process_capture_result(mCallbackOps, &result);
1432                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1433                        __func__, result.frame_number, current_capture_time);
1434                free_camera_metadata((camera_metadata_t *)result.result);
1435                delete[] result_buffers;
1436            } else {
1437                mCallbackOps->process_capture_result(mCallbackOps, &result);
1438                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1439                        __func__, result.frame_number, current_capture_time);
1440                free_camera_metadata((camera_metadata_t *)result.result);
1441            }
1442            // erase the element from the list
1443            i = mPendingRequestsList.erase(i);
1444        }
1445
1446
1447done_metadata:
1448        bool max_buffers_dequeued = false;
1449        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1450            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1451            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1452            if (queued_buffers == stream->max_buffers) {
1453                max_buffers_dequeued = true;
1454                break;
1455            }
1456        }
1457        if (!max_buffers_dequeued && !pending_requests) {
1458            // Unblock process_capture_request
1459            mPendingRequest = 0;
1460            pthread_cond_signal(&mRequestCond);
1461        }
1462    } else {
1463        // If the frame number doesn't exist in the pending request list,
1464        // directly send the buffer to the frameworks, and update pending buffers map
1465        // Otherwise, book-keep the buffer.
1466        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1467        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1468            i++;
1469        }
1470        if (i == mPendingRequestsList.end()) {
1471            // Verify all pending requests frame_numbers are greater
1472            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1473                    j != mPendingRequestsList.end(); j++) {
1474                if (j->frame_number < frame_number) {
1475                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1476                            __func__, j->frame_number, frame_number);
1477                }
1478            }
1479            camera3_capture_result_t result;
1480            result.result = NULL;
1481            result.frame_number = frame_number;
1482            result.num_output_buffers = 1;
1483            result.output_buffers = buffer;
1484            ALOGV("%s: result frame_number = %d, buffer = %p",
1485                    __func__, frame_number, buffer);
1486            mPendingBuffersMap.editValueFor(buffer->stream)--;
1487            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1488                int found = 0;
1489                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1490                      k != mStoredMetadataList.end(); k++) {
1491                    if (k->frame_number == frame_number) {
1492                        k->zsl_buf_hdl = buffer->buffer;
1493                        found = 1;
1494                        break;
1495                    }
1496                }
1497                if (!found) {
1498                   MetadataBufferInfo meta_info;
1499                   meta_info.frame_number = frame_number;
1500                   meta_info.zsl_buf_hdl = buffer->buffer;
1501                   mStoredMetadataList.push_back(meta_info);
1502                }
1503            }
1504            mCallbackOps->process_capture_result(mCallbackOps, &result);
1505        } else {
1506            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1507                    j != i->buffers.end(); j++) {
1508                if (j->stream == buffer->stream) {
1509                    if (j->buffer != NULL) {
1510                        ALOGE("%s: Error: buffer is already set", __func__);
1511                    } else {
1512                        j->buffer = (camera3_stream_buffer_t *)malloc(
1513                                sizeof(camera3_stream_buffer_t));
1514                        *(j->buffer) = *buffer;
1515                        ALOGV("%s: cache buffer %p at result frame_number %d",
1516                                __func__, buffer, frame_number);
1517                    }
1518                }
1519            }
1520        }
1521    }
1522    pthread_mutex_unlock(&mMutex);
1523    return;
1524}
1525
1526/*===========================================================================
1527 * FUNCTION   : translateCbMetadataToResultMetadata
1528 *
1529 * DESCRIPTION:
1530 *
1531 * PARAMETERS :
1532 *   @metadata : metadata information from callback
1533 *
1534 * RETURN     : camera_metadata_t*
1535 *              metadata in a format specified by fwk
1536 *==========================================================================*/
1537camera_metadata_t*
1538QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1539                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1540                                 int32_t request_id)
1541{
1542    CameraMetadata camMetadata;
1543    camera_metadata_t* resultMetadata;
1544
1545    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1546    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1547
1548    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1549    uint8_t next_entry;
1550    while (curr_entry != CAM_INTF_PARM_MAX) {
1551       ALOGV("%s: META_DEBUG: cur_entry is %d", __func__, curr_entry);
1552       switch (curr_entry) {
1553         case CAM_INTF_META_FACE_DETECTION:{
1554             cam_face_detection_data_t *faceDetectionInfo =
1555                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1556             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1557             int32_t faceIds[numFaces];
1558             uint8_t faceScores[numFaces];
1559             int32_t faceRectangles[numFaces * 4];
1560             int32_t faceLandmarks[numFaces * 6];
1561             int j = 0, k = 0;
1562             for (int i = 0; i < numFaces; i++) {
1563                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1564                 faceScores[i] = faceDetectionInfo->faces[i].score;
1565                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1566                         faceRectangles+j, -1);
1567                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1568                 j+= 4;
1569                 k+= 6;
1570             }
1571             if (numFaces > 0) {
1572                 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1573                 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1574                 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1575                     faceRectangles, numFaces*4);
1576                 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1577                     faceLandmarks, numFaces*6);
1578             }
1579            break;
1580            }
1581         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1582             uint8_t  *color_correct_mode =
1583                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1584             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1585             break;
1586          }
1587         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1588             int32_t  *ae_precapture_id =
1589                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1590             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1591             break;
1592          }
1593         case CAM_INTF_META_AEC_ROI: {
1594            cam_area_t  *hAeRegions =
1595                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1596             int32_t aeRegions[5];
1597             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1598             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1599             break;
1600          }
1601          case CAM_INTF_META_AEC_STATE:{
1602             uint8_t *ae_state =
1603                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1604             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1605             break;
1606          }
1607          case CAM_INTF_PARM_FOCUS_MODE:{
1608             uint8_t  *focusMode =
1609                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1610             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1611                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1612             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1613             break;
1614          }
1615          case CAM_INTF_META_AF_ROI:{
1616             /*af regions*/
1617             cam_area_t  *hAfRegions =
1618                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1619             int32_t afRegions[5];
1620             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1621             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1622             break;
1623          }
1624          case CAM_INTF_META_AF_STATE: {
1625             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1626             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1627             break;
1628          }
1629          case CAM_INTF_META_AF_TRIGGER_ID: {
1630             int32_t  *afTriggerId =
1631                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1632             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1633             break;
1634          }
1635          case CAM_INTF_PARM_WHITE_BALANCE: {
1636               uint8_t  *whiteBalance =
1637                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1638               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1639                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1640                   *whiteBalance);
1641               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1642               break;
1643          }
1644          case CAM_INTF_META_AWB_REGIONS: {
1645             /*awb regions*/
1646             cam_area_t  *hAwbRegions =
1647                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1648             int32_t awbRegions[5];
1649             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1650             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1651             break;
1652          }
1653          case CAM_INTF_META_AWB_STATE: {
1654             uint8_t  *whiteBalanceState =
1655                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1656             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1657             break;
1658          }
1659          case CAM_INTF_META_MODE: {
1660             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1661             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1662             break;
1663          }
1664          case CAM_INTF_META_EDGE_MODE: {
1665             uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1666             camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1667             break;
1668          }
1669          case CAM_INTF_META_FLASH_POWER: {
1670             uint8_t  *flashPower =
1671                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1672             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1673             break;
1674          }
1675          case CAM_INTF_META_FLASH_FIRING_TIME: {
1676             int64_t  *flashFiringTime =
1677                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1678             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1679             break;
1680          }
1681          case CAM_INTF_META_FLASH_STATE: {
1682             uint8_t  *flashState =
1683                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1684             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1685             break;
1686          }
1687          case CAM_INTF_META_HOTPIXEL_MODE: {
1688              uint8_t  *hotPixelMode =
1689                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1690              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1691              break;
1692          }
1693          case CAM_INTF_META_LENS_APERTURE:{
1694             float  *lensAperture =
1695                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1696             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1697             break;
1698          }
1699          case CAM_INTF_META_LENS_FILTERDENSITY: {
1700             float  *filterDensity =
1701                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1702             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1703             break;
1704          }
1705          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1706             float  *focalLength =
1707                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1708             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1709             break;
1710          }
1711          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1712             float  *focusDistance =
1713                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1714             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1715             break;
1716          }
1717          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1718             float  *focusRange =
1719                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1720             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1721          }
1722          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1723             uint8_t  *opticalStab =
1724                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1725             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1726          }
1727          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1728             uint8_t  *noiseRedMode =
1729                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1730             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1731             break;
1732          }
1733          case CAM_INTF_META_SCALER_CROP_REGION: {
1734             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1735             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1736             int32_t scalerCropRegion[4];
1737             scalerCropRegion[0] = hScalerCropRegion->left;
1738             scalerCropRegion[1] = hScalerCropRegion->top;
1739             scalerCropRegion[2] = hScalerCropRegion->width;
1740             scalerCropRegion[3] = hScalerCropRegion->height;
1741             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1742             break;
1743          }
1744          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1745             int64_t  *sensorExpTime =
1746                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1747             mMetadataResponse.exposure_time = *sensorExpTime;
1748             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1749             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1750             break;
1751          }
1752          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1753             int64_t  *sensorFameDuration =
1754                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1755             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1756             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1757             break;
1758          }
1759          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1760             int32_t  *sensorSensitivity =
1761                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1762             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1763             mMetadataResponse.iso_speed = *sensorSensitivity;
1764             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1765             break;
1766          }
1767          case CAM_INTF_META_SHADING_MODE: {
1768             uint8_t  *shadingMode =
1769                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1770             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1771             break;
1772          }
1773          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1774             uint8_t  *faceDetectMode =
1775                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1776             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1777                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1778                                                        *faceDetectMode);
1779             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1780             break;
1781          }
1782          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1783             uint8_t  *histogramMode =
1784                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1785             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1786             break;
1787          }
1788          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1789               uint8_t  *sharpnessMapMode =
1790                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1791               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1792                                  sharpnessMapMode, 1);
1793               break;
1794           }
1795          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1796               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1797               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1798               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1799                                  (int32_t*)sharpnessMap->sharpness,
1800                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1801               break;
1802          }
1803          case CAM_INTF_META_LENS_SHADING_MAP: {
1804               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1805               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1806               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1807               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1808               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1809                                  (float*)lensShadingMap->lens_shading,
1810                                  4*map_width*map_height);
1811               break;
1812          }
1813          case CAM_INTF_META_TONEMAP_CURVES:{
1814             //Populate CAM_INTF_META_TONEMAP_CURVES
1815             /* ch0 = G, ch 1 = B, ch 2 = R*/
1816             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1817             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1818             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1819                                (float*)tonemap->curves[0].tonemap_points,
1820                                tonemap->tonemap_points_cnt * 2);
1821
1822             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1823                                (float*)tonemap->curves[1].tonemap_points,
1824                                tonemap->tonemap_points_cnt * 2);
1825
1826             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1827                                (float*)tonemap->curves[2].tonemap_points,
1828                                tonemap->tonemap_points_cnt * 2);
1829             break;
1830          }
1831          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1832             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1833             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1834             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1835             break;
1836          }
1837          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1838              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1839              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1840              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1841                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1842              break;
1843          }
1844          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1845             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1846             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1847             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1848                       predColorCorrectionGains->gains, 4);
1849             break;
1850          }
1851          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1852             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1853                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1854             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1855                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1856             break;
1857
1858          }
1859          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
1860             uint8_t *blackLevelLock = (uint8_t*)
1861               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1862             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1863             break;
1864          }
1865          case CAM_INTF_META_SCENE_FLICKER:{
1866             uint8_t *sceneFlicker = (uint8_t*)
1867             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1868             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1869             break;
1870          }
1871          case CAM_INTF_PARM_LED_MODE:
1872             break;
1873          default:
1874             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
1875                   __func__, curr_entry);
1876             break;
1877       }
1878       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
1879       curr_entry = next_entry;
1880    }
1881    resultMetadata = camMetadata.release();
1882    return resultMetadata;
1883}
1884
1885/*===========================================================================
1886 * FUNCTION   : convertToRegions
1887 *
1888 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1889 *
1890 * PARAMETERS :
1891 *   @rect   : cam_rect_t struct to convert
1892 *   @region : int32_t destination array
1893 *   @weight : if we are converting from cam_area_t, weight is valid
1894 *             else weight = -1
1895 *
1896 *==========================================================================*/
1897void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1898    region[0] = rect.left;
1899    region[1] = rect.top;
1900    region[2] = rect.left + rect.width;
1901    region[3] = rect.top + rect.height;
1902    if (weight > -1) {
1903        region[4] = weight;
1904    }
1905}
1906
1907/*===========================================================================
1908 * FUNCTION   : convertFromRegions
1909 *
1910 * DESCRIPTION: helper method to convert from array to cam_rect_t
1911 *
1912 * PARAMETERS :
1913 *   @rect   : cam_rect_t struct to convert
1914 *   @region : int32_t destination array
1915 *   @weight : if we are converting from cam_area_t, weight is valid
1916 *             else weight = -1
1917 *
1918 *==========================================================================*/
1919void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1920                                                   const camera_metadata_t *settings,
1921                                                   uint32_t tag){
1922    CameraMetadata frame_settings;
1923    frame_settings = settings;
1924    int32_t x_min = frame_settings.find(tag).data.i32[0];
1925    int32_t y_min = frame_settings.find(tag).data.i32[1];
1926    int32_t x_max = frame_settings.find(tag).data.i32[2];
1927    int32_t y_max = frame_settings.find(tag).data.i32[3];
1928    roi->weight = frame_settings.find(tag).data.i32[4];
1929    roi->rect.left = x_min;
1930    roi->rect.top = y_min;
1931    roi->rect.width = x_max - x_min;
1932    roi->rect.height = y_max - y_min;
1933}
1934
1935/*===========================================================================
1936 * FUNCTION   : resetIfNeededROI
1937 *
1938 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1939 *              crop region
1940 *
1941 * PARAMETERS :
1942 *   @roi       : cam_area_t struct to resize
1943 *   @scalerCropRegion : cam_crop_region_t region to compare against
1944 *
1945 *
1946 *==========================================================================*/
1947bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1948                                                 const cam_crop_region_t* scalerCropRegion)
1949{
1950    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1951    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1952    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1953    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1954    if ((roi_x_max < scalerCropRegion->left) ||
1955        (roi_y_max < scalerCropRegion->top)  ||
1956        (roi->rect.left > crop_x_max) ||
1957        (roi->rect.top > crop_y_max)){
1958        return false;
1959    }
1960    if (roi->rect.left < scalerCropRegion->left) {
1961        roi->rect.left = scalerCropRegion->left;
1962    }
1963    if (roi->rect.top < scalerCropRegion->top) {
1964        roi->rect.top = scalerCropRegion->top;
1965    }
1966    if (roi_x_max > crop_x_max) {
1967        roi_x_max = crop_x_max;
1968    }
1969    if (roi_y_max > crop_y_max) {
1970        roi_y_max = crop_y_max;
1971    }
1972    roi->rect.width = roi_x_max - roi->rect.left;
1973    roi->rect.height = roi_y_max - roi->rect.top;
1974    return true;
1975}
1976
1977/*===========================================================================
1978 * FUNCTION   : convertLandmarks
1979 *
1980 * DESCRIPTION: helper method to extract the landmarks from face detection info
1981 *
1982 * PARAMETERS :
1983 *   @face   : cam_rect_t struct to convert
1984 *   @landmarks : int32_t destination array
1985 *
1986 *
1987 *==========================================================================*/
1988void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1989{
1990    landmarks[0] = face.left_eye_center.x;
1991    landmarks[1] = face.left_eye_center.y;
1992    landmarks[2] = face.right_eye_center.y;
1993    landmarks[3] = face.right_eye_center.y;
1994    landmarks[4] = face.mouth_center.x;
1995    landmarks[5] = face.mouth_center.y;
1996}
1997
1998#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1999/*===========================================================================
2000 * FUNCTION   : initCapabilities
2001 *
2002 * DESCRIPTION: initialize camera capabilities in static data struct
2003 *
2004 * PARAMETERS :
2005 *   @cameraId  : camera Id
2006 *
2007 * RETURN     : int32_t type of status
2008 *              NO_ERROR  -- success
2009 *              none-zero failure code
2010 *==========================================================================*/
2011int QCamera3HardwareInterface::initCapabilities(int cameraId)
2012{
2013    int rc = 0;
2014    mm_camera_vtbl_t *cameraHandle = NULL;
2015    QCamera3HeapMemory *capabilityHeap = NULL;
2016
2017    cameraHandle = camera_open(cameraId);
2018    if (!cameraHandle) {
2019        ALOGE("%s: camera_open failed", __func__);
2020        rc = -1;
2021        goto open_failed;
2022    }
2023
2024    capabilityHeap = new QCamera3HeapMemory();
2025    if (capabilityHeap == NULL) {
2026        ALOGE("%s: creation of capabilityHeap failed", __func__);
2027        goto heap_creation_failed;
2028    }
2029    /* Allocate memory for capability buffer */
2030    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2031    if(rc != OK) {
2032        ALOGE("%s: No memory for cappability", __func__);
2033        goto allocate_failed;
2034    }
2035
2036    /* Map memory for capability buffer */
2037    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2038    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2039                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2040                                capabilityHeap->getFd(0),
2041                                sizeof(cam_capability_t));
2042    if(rc < 0) {
2043        ALOGE("%s: failed to map capability buffer", __func__);
2044        goto map_failed;
2045    }
2046
2047    /* Query Capability */
2048    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2049    if(rc < 0) {
2050        ALOGE("%s: failed to query capability",__func__);
2051        goto query_failed;
2052    }
2053    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2054    if (!gCamCapability[cameraId]) {
2055        ALOGE("%s: out of memory", __func__);
2056        goto query_failed;
2057    }
2058    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2059                                        sizeof(cam_capability_t));
2060    rc = 0;
2061
2062query_failed:
2063    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2064                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2065map_failed:
2066    capabilityHeap->deallocate();
2067allocate_failed:
2068    delete capabilityHeap;
2069heap_creation_failed:
2070    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2071    cameraHandle = NULL;
2072open_failed:
2073    return rc;
2074}
2075
2076/*===========================================================================
2077 * FUNCTION   : initParameters
2078 *
2079 * DESCRIPTION: initialize camera parameters
2080 *
2081 * PARAMETERS :
2082 *
2083 * RETURN     : int32_t type of status
2084 *              NO_ERROR  -- success
2085 *              none-zero failure code
2086 *==========================================================================*/
2087int QCamera3HardwareInterface::initParameters()
2088{
2089    int rc = 0;
2090
2091    //Allocate Set Param Buffer
2092    mParamHeap = new QCamera3HeapMemory();
2093    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2094    if(rc != OK) {
2095        rc = NO_MEMORY;
2096        ALOGE("Failed to allocate SETPARM Heap memory");
2097        delete mParamHeap;
2098        mParamHeap = NULL;
2099        return rc;
2100    }
2101
2102    //Map memory for parameters buffer
2103    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2104            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2105            mParamHeap->getFd(0),
2106            sizeof(parm_buffer_t));
2107    if(rc < 0) {
2108        ALOGE("%s:failed to map SETPARM buffer",__func__);
2109        rc = FAILED_TRANSACTION;
2110        mParamHeap->deallocate();
2111        delete mParamHeap;
2112        mParamHeap = NULL;
2113        return rc;
2114    }
2115
2116    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2117    return rc;
2118}
2119
2120/*===========================================================================
2121 * FUNCTION   : deinitParameters
2122 *
2123 * DESCRIPTION: de-initialize camera parameters
2124 *
2125 * PARAMETERS :
2126 *
2127 * RETURN     : NONE
2128 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tear down the SETPARM buffer created by initParameters(): unmap it
    // from the backend first, then release the heap memory backing it.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage and is now dangling;
    // clear it so stale use is an obvious NULL deref.
    mParameters = NULL;
}
2140
2141/*===========================================================================
2142 * FUNCTION   : calcMaxJpegSize
2143 *
2144 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2145 *
2146 * PARAMETERS :
2147 *
2148 * RETURN     : max_jpeg_size
2149 *==========================================================================*/
2150int QCamera3HardwareInterface::calcMaxJpegSize()
2151{
2152    int32_t max_jpeg_size = 0;
2153    int temp_width, temp_height;
2154    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2155        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2156        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2157        if (temp_width * temp_height > max_jpeg_size ) {
2158            max_jpeg_size = temp_width * temp_height;
2159        }
2160    }
2161    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2162    return max_jpeg_size;
2163}
2164
2165/*===========================================================================
2166 * FUNCTION   : initStaticMetadata
2167 *
2168 * DESCRIPTION: initialize the static metadata
2169 *
2170 * PARAMETERS :
2171 *   @cameraId  : camera Id
2172 *
2173 * RETURN     : int32_t type of status
2174 *              0  -- success
2175 *              non-zero failure code
2176 *==========================================================================*/
2177int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2178{
2179    int rc = 0;
2180    CameraMetadata staticInfo;
2181
2182    /* android.info: hardware level */
2183    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2184    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2185        &supportedHardwareLevel, 1);
2186
2187    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2188    /*HAL 3 only*/
2189    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2190                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2191
2192    /*hard coded for now but this should come from sensor*/
2193    float min_focus_distance;
2194    if(facingBack){
2195        min_focus_distance = 10;
2196    } else {
2197        min_focus_distance = 0;
2198    }
2199    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2200                    &min_focus_distance, 1);
2201
2202    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2203                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2204
2205    /*should be using focal lengths but sensor doesn't provide that info now*/
2206    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2207                      &gCamCapability[cameraId]->focal_length,
2208                      1);
2209
2210    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2211                      gCamCapability[cameraId]->apertures,
2212                      gCamCapability[cameraId]->apertures_count);
2213
2214    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2215                gCamCapability[cameraId]->filter_densities,
2216                gCamCapability[cameraId]->filter_densities_count);
2217
2218
2219    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2220                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2221                      gCamCapability[cameraId]->optical_stab_modes_count);
2222
2223    staticInfo.update(ANDROID_LENS_POSITION,
2224                      gCamCapability[cameraId]->lens_position,
2225                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2226
2227    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2228                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2229    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2230                      lens_shading_map_size,
2231                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2232
2233    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2234                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2235    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2236            geo_correction_map_size,
2237            sizeof(geo_correction_map_size)/sizeof(int32_t));
2238
2239    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2240                       gCamCapability[cameraId]->geo_correction_map,
2241                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2242
2243    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2244            gCamCapability[cameraId]->sensor_physical_size, 2);
2245
2246    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2247            gCamCapability[cameraId]->exposure_time_range, 2);
2248
2249    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2250            &gCamCapability[cameraId]->max_frame_duration, 1);
2251
2252    camera_metadata_rational baseGainFactor = {
2253            gCamCapability[cameraId]->base_gain_factor.numerator,
2254            gCamCapability[cameraId]->base_gain_factor.denominator};
2255    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
2256                      &baseGainFactor, 1);
2257
2258    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2259                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2260
2261    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2262                                               gCamCapability[cameraId]->pixel_array_size.height};
2263    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2264                      pixel_array_size, 2);
2265
2266    int32_t active_array_size[] = {0, 0,
2267                                                gCamCapability[cameraId]->active_array_size.width,
2268                                                gCamCapability[cameraId]->active_array_size.height};
2269    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2270                      active_array_size, 4);
2271
2272    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2273            &gCamCapability[cameraId]->white_level, 1);
2274
2275    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2276            gCamCapability[cameraId]->black_level_pattern, 4);
2277
2278    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2279                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2280
2281    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2282                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2283
2284    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2285                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2286
2287    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2288                      &gCamCapability[cameraId]->histogram_size, 1);
2289
2290    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2291            &gCamCapability[cameraId]->max_histogram_count, 1);
2292
2293    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2294                                                gCamCapability[cameraId]->sharpness_map_size.height};
2295
2296    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2297            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2298
2299    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2300            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2301
2302
2303    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2304                      &gCamCapability[cameraId]->raw_min_duration,
2305                       1);
2306
2307    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2308                                                HAL_PIXEL_FORMAT_BLOB};
2309    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2310    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2311                      scalar_formats,
2312                      scalar_formats_count);
2313
2314    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2315    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2316              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2317              available_processed_sizes);
2318    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2319                available_processed_sizes,
2320                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2321
2322    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2323                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2324                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2325
2326    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2327    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2328                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2329                 available_fps_ranges);
2330    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2331            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2332
2333    camera_metadata_rational exposureCompensationStep = {
2334            gCamCapability[cameraId]->exp_compensation_step.numerator,
2335            gCamCapability[cameraId]->exp_compensation_step.denominator};
2336    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2337                      &exposureCompensationStep, 1);
2338
2339    /*TO DO*/
2340    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2341    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2342                      availableVstabModes, sizeof(availableVstabModes));
2343
2344    /*HAL 1 and HAL 3 common*/
2345    float maxZoom = 4;
2346    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2347            &maxZoom, 1);
2348
2349    int32_t max3aRegions = 1;
2350    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2351            &max3aRegions, 1);
2352
2353    uint8_t availableFaceDetectModes[] = {
2354            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2355            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2356    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2357                      availableFaceDetectModes,
2358                      sizeof(availableFaceDetectModes));
2359
2360    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2361                                       gCamCapability[cameraId]->raw_dim.height};
2362    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2363                      raw_size,
2364                      sizeof(raw_size)/sizeof(uint32_t));
2365
2366    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2367                                                        gCamCapability[cameraId]->exposure_compensation_max};
2368    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2369            exposureCompensationRange,
2370            sizeof(exposureCompensationRange)/sizeof(int32_t));
2371
2372    uint8_t lensFacing = (facingBack) ?
2373            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2374    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2375
2376    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2377                available_processed_sizes,
2378                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2379
2380    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2381                      available_thumbnail_sizes,
2382                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2383
2384    int32_t max_jpeg_size = 0;
2385    int temp_width, temp_height;
2386    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2387        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2388        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2389        if (temp_width * temp_height > max_jpeg_size ) {
2390            max_jpeg_size = temp_width * temp_height;
2391        }
2392    }
2393    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2394    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2395                      &max_jpeg_size, 1);
2396
2397    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2398    int32_t size = 0;
2399    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2400        int val = lookupFwkName(EFFECT_MODES_MAP,
2401                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2402                                   gCamCapability[cameraId]->supported_effects[i]);
2403        if (val != NAME_NOT_FOUND) {
2404            avail_effects[size] = (uint8_t)val;
2405            size++;
2406        }
2407    }
2408    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2409                      avail_effects,
2410                      size);
2411
2412    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2413    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2414    int32_t supported_scene_modes_cnt = 0;
2415    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2416        int val = lookupFwkName(SCENE_MODES_MAP,
2417                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2418                                gCamCapability[cameraId]->supported_scene_modes[i]);
2419        if (val != NAME_NOT_FOUND) {
2420            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2421            supported_indexes[supported_scene_modes_cnt] = i;
2422            supported_scene_modes_cnt++;
2423        }
2424    }
2425
2426    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2427                      avail_scene_modes,
2428                      supported_scene_modes_cnt);
2429
2430    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2431    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2432                      supported_scene_modes_cnt,
2433                      scene_mode_overrides,
2434                      supported_indexes,
2435                      cameraId);
2436    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2437                      scene_mode_overrides,
2438                      supported_scene_modes_cnt*3);
2439
2440    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2441    size = 0;
2442    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2443        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2444                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2445                                 gCamCapability[cameraId]->supported_antibandings[i]);
2446        if (val != NAME_NOT_FOUND) {
2447            avail_antibanding_modes[size] = (uint8_t)val;
2448            size++;
2449        }
2450
2451    }
2452    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2453                      avail_antibanding_modes,
2454                      size);
2455
2456    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2457    size = 0;
2458    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2459        int val = lookupFwkName(FOCUS_MODES_MAP,
2460                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2461                                gCamCapability[cameraId]->supported_focus_modes[i]);
2462        if (val != NAME_NOT_FOUND) {
2463            avail_af_modes[size] = (uint8_t)val;
2464            size++;
2465        }
2466    }
2467    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2468                      avail_af_modes,
2469                      size);
2470
2471    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2472    size = 0;
2473    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2474        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2475                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2476                                    gCamCapability[cameraId]->supported_white_balances[i]);
2477        if (val != NAME_NOT_FOUND) {
2478            avail_awb_modes[size] = (uint8_t)val;
2479            size++;
2480        }
2481    }
2482    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2483                      avail_awb_modes,
2484                      size);
2485
2486    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2487    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2488      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2489
2490    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2491            available_flash_levels,
2492            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2493
2494
2495    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2496    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2497            &flashAvailable, 1);
2498
2499    uint8_t avail_ae_modes[5];
2500    size = 0;
2501    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2502        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2503        size++;
2504    }
2505    if (flashAvailable) {
2506        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2507        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2508        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2509    }
2510    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2511                      avail_ae_modes,
2512                      size);
2513
2514    int32_t sensitivity_range[2];
2515    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2516    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2517    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2518                      sensitivity_range,
2519                      sizeof(sensitivity_range) / sizeof(int32_t));
2520
2521    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2522                      &gCamCapability[cameraId]->max_analog_sensitivity,
2523                      1);
2524
2525    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2526                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2527                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2528
2529    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2530    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2531                      &sensor_orientation,
2532                      1);
2533
2534    int32_t max_output_streams[3] = {1, 3, 1};
2535    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2536                      max_output_streams,
2537                      3);
2538
2539    gStaticMetadata[cameraId] = staticInfo.release();
2540    return rc;
2541}
2542
2543/*===========================================================================
2544 * FUNCTION   : makeTable
2545 *
2546 * DESCRIPTION: make a table of sizes
2547 *
2548 * PARAMETERS :
2549 *
2550 *
2551 *==========================================================================*/
2552void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2553                                          int32_t* sizeTable)
2554{
2555    int j = 0;
2556    for (int i = 0; i < size; i++) {
2557        sizeTable[j] = dimTable[i].width;
2558        sizeTable[j+1] = dimTable[i].height;
2559        j+=2;
2560    }
2561}
2562
2563/*===========================================================================
2564 * FUNCTION   : makeFPSTable
2565 *
2566 * DESCRIPTION: make a table of fps ranges
2567 *
2568 * PARAMETERS :
2569 *
2570 *==========================================================================*/
2571void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2572                                          int32_t* fpsRangesTable)
2573{
2574    int j = 0;
2575    for (int i = 0; i < size; i++) {
2576        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2577        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2578        j+=2;
2579    }
2580}
2581
2582/*===========================================================================
2583 * FUNCTION   : makeOverridesList
2584 *
2585 * DESCRIPTION: make a list of scene mode overrides
2586 *
2587 * PARAMETERS :
2588 *
2589 *
2590 *==========================================================================*/
2591void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2592                                                  uint8_t size, uint8_t* overridesList,
2593                                                  uint8_t* supported_indexes,
2594                                                  int camera_id)
2595{
2596    /*daemon will give a list of overrides for all scene modes.
2597      However we should send the fwk only the overrides for the scene modes
2598      supported by the framework*/
2599    int j = 0, index = 0, supt = 0;
2600    uint8_t focus_override;
2601    for (int i = 0; i < size; i++) {
2602        supt = 0;
2603        index = supported_indexes[i];
2604        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2605        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2606                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2607                                                    overridesTable[index].awb_mode);
2608        focus_override = (uint8_t)overridesTable[index].af_mode;
2609        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2610           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2611              supt = 1;
2612              break;
2613           }
2614        }
2615        if (supt) {
2616           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2617                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2618                                              focus_override);
2619        } else {
2620           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2621        }
2622        j+=3;
2623    }
2624}
2625
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the backend pixel format to a type recognized by the
 *              framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
 *
 *==========================================================================*/
2636int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2637{
2638    int32_t halPixelFormat;
2639
2640    switch (format) {
2641    case CAM_FORMAT_YUV_420_NV12:
2642        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2643        break;
2644    case CAM_FORMAT_YUV_420_NV21:
2645        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2646        break;
2647    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2648        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2649        break;
2650    case CAM_FORMAT_YUV_420_YV12:
2651        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2652        break;
2653    case CAM_FORMAT_YUV_422_NV16:
2654    case CAM_FORMAT_YUV_422_NV61:
2655    default:
2656        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2657        break;
2658    }
2659    return halPixelFormat;
2660}
2661
2662/*===========================================================================
2663 * FUNCTION   : getSensorSensitivity
2664 *
2665 * DESCRIPTION: convert iso_mode to an integer value
2666 *
2667 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2668 *
2669 ** RETURN    : sensitivity supported by sensor
2670 *
2671 *==========================================================================*/
2672int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2673{
2674    int32_t sensitivity;
2675
2676    switch (iso_mode) {
2677    case CAM_ISO_MODE_100:
2678        sensitivity = 100;
2679        break;
2680    case CAM_ISO_MODE_200:
2681        sensitivity = 200;
2682        break;
2683    case CAM_ISO_MODE_400:
2684        sensitivity = 400;
2685        break;
2686    case CAM_ISO_MODE_800:
2687        sensitivity = 800;
2688        break;
2689    case CAM_ISO_MODE_1600:
2690        sensitivity = 1600;
2691        break;
2692    default:
2693        sensitivity = -1;
2694        break;
2695    }
2696    return sensitivity;
2697}
2698
2699
2700/*===========================================================================
2701 * FUNCTION   : AddSetParmEntryToBatch
2702 *
2703 * DESCRIPTION: add set parameter entry into batch
2704 *
2705 * PARAMETERS :
2706 *   @p_table     : ptr to parameter buffer
2707 *   @paramType   : parameter type
2708 *   @paramLength : length of parameter value
2709 *   @paramValue  : ptr to parameter value
2710 *
2711 * RETURN     : int32_t type of status
2712 *              NO_ERROR  -- success
2713 *              none-zero failure code
2714 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    /* The batch keeps a singly linked list of valid entries, threaded
     * through the table via the GET/SET_*_PARAM_ID macros and kept sorted
     * by parameter id.  paramType doubles as the entry's slot index. */
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
        /* entry is already the list head; links are correct as-is */
    } else if (position < current){
        /* new smallest id: link ahead of the old head and become the head */
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            /* splice: current -> position -> (current's old successor) */
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    /* Reject values larger than a table slot before writing anything;
     * note the list links above have already been updated at this point. */
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2756
/*===========================================================================
 * FUNCTION   : lookupFwkName
 *
 * DESCRIPTION: in case the enum is not the same in the framework and the
 *              backend, make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : length of the map
 *   @hal_name : name of the hal_parm to map
 *
 * RETURN     : fwk_name  -- success
 *              NAME_NOT_FOUND -- no matching framework enum was found
 *==========================================================================*/
int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
                                             int len, int hal_name)
{
    /* Linear scan of a small HAL->framework enum map.
     * NOTE(review): the int8_t return truncates fwk_name values outside
     * [-128, 127] — confirm all map tables stay within that range before
     * adding larger framework enums. */
    for (int i = 0; i < len; i++) {
        if (arr[i].hal_name == hal_name)
            return arr[i].fwk_name;
    }

    /* Not able to find matching framework type is not necessarily
     * an error case. This happens when mm-camera supports more attributes
     * than the frameworks do */
    ALOGD("%s: Cannot find matching framework type", __func__);
    return NAME_NOT_FOUND;
}
2787
/*===========================================================================
 * FUNCTION   : lookupHalName
 *
 * DESCRIPTION: in case the enum is not the same in the framework and the
 *              backend, make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : length of the map
 *   @fwk_name : name of the fwk_parm to map
 *
 * RETURN     : hal_name  -- success
 *              NAME_NOT_FOUND -- no matching HAL enum was found
 *==========================================================================*/
2803int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2804                                             int len, int fwk_name)
2805{
2806    for (int i = 0; i < len; i++) {
2807       if (arr[i].fwk_name == fwk_name)
2808           return arr[i].hal_name;
2809    }
2810    ALOGE("%s: Cannot find matching hal type", __func__);
2811    return NAME_NOT_FOUND;
2812}
2813
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
2827int QCamera3HardwareInterface::getCamInfo(int cameraId,
2828                                    struct camera_info *info)
2829{
2830    int rc = 0;
2831
2832    if (NULL == gCamCapability[cameraId]) {
2833        rc = initCapabilities(cameraId);
2834        if (rc < 0) {
2835            //pthread_mutex_unlock(&g_camlock);
2836            return rc;
2837        }
2838    }
2839
2840    if (NULL == gStaticMetadata[cameraId]) {
2841        rc = initStaticMetadata(cameraId);
2842        if (rc < 0) {
2843            return rc;
2844        }
2845    }
2846
2847    switch(gCamCapability[cameraId]->position) {
2848    case CAM_POSITION_BACK:
2849        info->facing = CAMERA_FACING_BACK;
2850        break;
2851
2852    case CAM_POSITION_FRONT:
2853        info->facing = CAMERA_FACING_FRONT;
2854        break;
2855
2856    default:
2857        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2858        rc = -1;
2859        break;
2860    }
2861
2862
2863    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2864    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2865    info->static_camera_characteristics = gStaticMetadata[cameraId];
2866
2867    return rc;
2868}
2869
2870/*===========================================================================
2871 * FUNCTION   : translateMetadata
2872 *
2873 * DESCRIPTION: translate the metadata into camera_metadata_t
2874 *
2875 * PARAMETERS : type of the request
2876 *
2877 *
2878 * RETURN     : success: camera_metadata_t*
2879 *              failure: NULL
2880 *
2881 *==========================================================================*/
2882camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2883{
2884    pthread_mutex_lock(&mMutex);
2885
2886    if (mDefaultMetadata[type] != NULL) {
2887        pthread_mutex_unlock(&mMutex);
2888        return mDefaultMetadata[type];
2889    }
2890    //first time we are handling this request
2891    //fill up the metadata structure using the wrapper class
2892    CameraMetadata settings;
2893    //translate from cam_capability_t to camera_metadata_tag_t
2894    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2895    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2896
2897    /*control*/
2898
2899    uint8_t controlIntent = 0;
2900    switch (type) {
2901      case CAMERA3_TEMPLATE_PREVIEW:
2902        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2903        break;
2904      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2905        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2906        break;
2907      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2908        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2909        break;
2910      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2911        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2912        break;
2913      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2914        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2915        break;
2916      default:
2917        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2918        break;
2919    }
2920    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2921
2922    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2923            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2924
2925    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2926    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2927
2928    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2929    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2930
2931    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2932    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2933
2934    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2935    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2936
2937    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2938    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2939
2940    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2941    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2942
2943    static uint8_t focusMode;
2944    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2945        ALOGE("%s: Setting focus mode to auto", __func__);
2946        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2947    } else {
2948        ALOGE("%s: Setting focus mode to off", __func__);
2949        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2950    }
2951    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2952
2953    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2954    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2955
2956    /*flash*/
2957    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2958    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2959
2960    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2961    settings.update(ANDROID_FLASH_FIRING_POWER,
2962            &flashFiringLevel, 1);
2963
2964    /* lens */
2965    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2966    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2967
2968    if (gCamCapability[mCameraId]->filter_densities_count) {
2969        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2970        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2971                        gCamCapability[mCameraId]->filter_densities_count);
2972    }
2973
2974    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2975    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2976
2977    /* Exposure time(Update the Min Exposure Time)*/
2978    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
2979    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
2980
2981    /* frame duration */
2982    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2983    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2984
2985    /* sensitivity */
2986    static const int32_t default_sensitivity = 100;
2987    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2988
2989    /*edge mode*/
2990    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2991    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
2992
2993    /*noise reduction mode*/
2994    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
2995    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
2996
2997    /*color correction mode*/
2998    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
2999    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3000
3001    /*transform matrix mode*/
3002    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3003    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3004
3005    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3006    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3007
3008    mDefaultMetadata[type] = settings.release();
3009
3010    pthread_mutex_unlock(&mMutex);
3011    return mDefaultMetadata[type];
3012}
3013
3014/*===========================================================================
3015 * FUNCTION   : setFrameParameters
3016 *
3017 * DESCRIPTION: set parameters per frame as requested in the metadata from
3018 *              framework
3019 *
3020 * PARAMETERS :
3021 *   @request   : request that needs to be serviced
3022 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3023 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE or a negative error code
3026 *==========================================================================*/
3027int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3028                    uint32_t streamTypeMask)
3029{
3030    /*translate from camera_metadata_t type to parm_type_t*/
3031    int rc = 0;
3032    if (request->settings == NULL && mFirstRequest) {
3033        /*settings cannot be null for the first request*/
3034        return BAD_VALUE;
3035    }
3036
3037    int32_t hal_version = CAM_HAL_V3;
3038
3039    memset(mParameters, 0, sizeof(parm_buffer_t));
3040    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3041    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3042                sizeof(hal_version), &hal_version);
3043    if (rc < 0) {
3044        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3045        return BAD_VALUE;
3046    }
3047
3048    /*we need to update the frame number in the parameters*/
3049    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3050                                sizeof(request->frame_number), &(request->frame_number));
3051    if (rc < 0) {
3052        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3053        return BAD_VALUE;
3054    }
3055
3056    /* Update stream id mask where buffers are requested */
3057    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3058                                sizeof(streamTypeMask), &streamTypeMask);
3059    if (rc < 0) {
3060        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3061        return BAD_VALUE;
3062    }
3063
3064    if(request->settings != NULL){
3065        rc = translateMetadataToParameters(request);
3066    }
3067    /*set the parameters to backend*/
3068    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3069    return rc;
3070}
3071
3072/*===========================================================================
3073 * FUNCTION   : translateMetadataToParameters
3074 *
3075 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3076 *
3077 *
3078 * PARAMETERS :
3079 *   @request  : request sent from framework
3080 *
3081 *
 * RETURN     : success: NO_ERROR
 *              failure: a negative error code from parameter translation
3084 *==========================================================================*/
3085int QCamera3HardwareInterface::translateMetadataToParameters
3086                                  (const camera3_capture_request_t *request)
3087{
3088    int rc = 0;
3089    CameraMetadata frame_settings;
3090    frame_settings = request->settings;
3091
3092    /* Do not change the order of the following list unless you know what you are
3093     * doing.
3094     * The order is laid out in such a way that parameters in the front of the table
3095     * may be used to override the parameters later in the table. Examples are:
3096     * 1. META_MODE should precede AEC/AWB/AF MODE
3097     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3098     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3099     * 4. Any mode should precede it's corresponding settings
3100     */
3101    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3102        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3103        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3104                sizeof(metaMode), &metaMode);
3105        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3106           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3107           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3108                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3109                                             fwk_sceneMode);
3110           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3111                sizeof(sceneMode), &sceneMode);
3112        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3113           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3114           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3115                sizeof(sceneMode), &sceneMode);
3116        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3117           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3118           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3119                sizeof(sceneMode), &sceneMode);
3120        }
3121    }
3122
3123    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3124        uint8_t fwk_aeMode =
3125            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3126        uint8_t aeMode;
3127        int32_t redeye;
3128
3129        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3130            aeMode = CAM_AE_MODE_OFF;
3131        } else {
3132            aeMode = CAM_AE_MODE_ON;
3133        }
3134        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3135            redeye = 1;
3136        } else {
3137            redeye = 0;
3138        }
3139
3140        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3141                                          sizeof(AE_FLASH_MODE_MAP),
3142                                          fwk_aeMode);
3143        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3144                sizeof(aeMode), &aeMode);
3145        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3146                sizeof(flashMode), &flashMode);
3147        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3148                sizeof(redeye), &redeye);
3149    }
3150
3151    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3152        uint8_t fwk_whiteLevel =
3153            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3154        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3155                sizeof(WHITE_BALANCE_MODES_MAP),
3156                fwk_whiteLevel);
3157        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3158                sizeof(whiteLevel), &whiteLevel);
3159    }
3160
3161    float focalDistance = -1.0;
3162    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3163        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3164        rc = AddSetParmEntryToBatch(mParameters,
3165                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3166                sizeof(focalDistance), &focalDistance);
3167    }
3168
3169    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3170        uint8_t fwk_focusMode =
3171            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3172        uint8_t focusMode;
3173        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3174            focusMode = CAM_FOCUS_MODE_INFINITY;
3175        } else{
3176         focusMode = lookupHalName(FOCUS_MODES_MAP,
3177                                   sizeof(FOCUS_MODES_MAP),
3178                                   fwk_focusMode);
3179        }
3180        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3181                sizeof(focusMode), &focusMode);
3182    }
3183
3184    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3185        int32_t antibandingMode =
3186            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3187        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3188                sizeof(antibandingMode), &antibandingMode);
3189    }
3190
3191    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3192        int32_t expCompensation = frame_settings.find(
3193            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3194        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3195            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3196        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3197            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3198        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3199          sizeof(expCompensation), &expCompensation);
3200    }
3201
3202    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3203        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3204        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3205                sizeof(aeLock), &aeLock);
3206    }
3207    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3208        cam_fps_range_t fps_range;
3209        fps_range.min_fps =
3210            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3211        fps_range.max_fps =
3212            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3213        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3214                sizeof(fps_range), &fps_range);
3215    }
3216
3217    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3218        uint8_t awbLock =
3219            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3220        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3221                sizeof(awbLock), &awbLock);
3222    }
3223
3224    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3225        uint8_t fwk_effectMode =
3226            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3227        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3228                sizeof(EFFECT_MODES_MAP),
3229                fwk_effectMode);
3230        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3231                sizeof(effectMode), &effectMode);
3232    }
3233
3234    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3235        uint8_t colorCorrectMode =
3236            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3237        rc =
3238            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3239                    sizeof(colorCorrectMode), &colorCorrectMode);
3240    }
3241
3242    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3243        cam_color_correct_gains_t colorCorrectGains;
3244        for (int i = 0; i < 4; i++) {
3245            colorCorrectGains.gains[i] =
3246                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3247        }
3248        rc =
3249            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3250                    sizeof(colorCorrectGains), &colorCorrectGains);
3251    }
3252
3253    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3254        cam_color_correct_matrix_t colorCorrectTransform;
3255        cam_rational_type_t transform_elem;
3256        int num = 0;
3257        for (int i = 0; i < 3; i++) {
3258           for (int j = 0; j < 3; j++) {
3259              transform_elem.numerator =
3260                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3261              transform_elem.denominator =
3262                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3263              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3264              num++;
3265           }
3266        }
3267        rc =
3268            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3269                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3270    }
3271
3272    cam_trigger_t aecTrigger;
3273    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3274    aecTrigger.trigger_id = -1;
3275    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3276        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3277        aecTrigger.trigger =
3278            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3279        aecTrigger.trigger_id =
3280            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3281    }
3282    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3283                                sizeof(aecTrigger), &aecTrigger);
3284
3285    /*af_trigger must come with a trigger id*/
3286    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3287        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3288        cam_trigger_t af_trigger;
3289        af_trigger.trigger =
3290            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3291        af_trigger.trigger_id =
3292            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3293        rc = AddSetParmEntryToBatch(mParameters,
3294                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3295    }
3296
3297    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3298        int32_t demosaic =
3299            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3300        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3301                sizeof(demosaic), &demosaic);
3302    }
3303
3304    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3305        cam_edge_application_t edge_application;
3306        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3307        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3308            edge_application.sharpness = 0;
3309        } else {
3310            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3311                int32_t edgeStrength =
3312                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3313                edge_application.sharpness = edgeStrength;
3314            } else {
3315                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3316            }
3317        }
3318        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3319                sizeof(edge_application), &edge_application);
3320    }
3321
3322    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3323        int32_t respectFlashMode = 1;
3324        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3325            uint8_t fwk_aeMode =
3326                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3327            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3328                respectFlashMode = 0;
3329                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3330                    __func__);
3331            }
3332        }
3333        if (respectFlashMode) {
3334            uint8_t flashMode =
3335                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3336            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3337                                          sizeof(FLASH_MODES_MAP),
3338                                          flashMode);
3339            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3340            // To check: CAM_INTF_META_FLASH_MODE usage
3341            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3342                          sizeof(flashMode), &flashMode);
3343        }
3344    }
3345
3346    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3347        uint8_t flashPower =
3348            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3349        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3350                sizeof(flashPower), &flashPower);
3351    }
3352
3353    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3354        int64_t flashFiringTime =
3355            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3356        rc = AddSetParmEntryToBatch(mParameters,
3357                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3358    }
3359
3360    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3361        uint8_t geometricMode =
3362            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3363        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3364                sizeof(geometricMode), &geometricMode);
3365    }
3366
3367    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3368        uint8_t geometricStrength =
3369            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3370        rc = AddSetParmEntryToBatch(mParameters,
3371                CAM_INTF_META_GEOMETRIC_STRENGTH,
3372                sizeof(geometricStrength), &geometricStrength);
3373    }
3374
3375    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3376        uint8_t hotPixelMode =
3377            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3378        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3379                sizeof(hotPixelMode), &hotPixelMode);
3380    }
3381
3382    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3383        float lensAperture =
3384            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3385        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3386                sizeof(lensAperture), &lensAperture);
3387    }
3388
3389    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3390        float filterDensity =
3391            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3392        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3393                sizeof(filterDensity), &filterDensity);
3394    }
3395
3396    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3397        float focalLength =
3398            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3399        rc = AddSetParmEntryToBatch(mParameters,
3400                CAM_INTF_META_LENS_FOCAL_LENGTH,
3401                sizeof(focalLength), &focalLength);
3402    }
3403
3404    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3405        uint8_t optStabMode =
3406            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3407        rc = AddSetParmEntryToBatch(mParameters,
3408                CAM_INTF_META_LENS_OPT_STAB_MODE,
3409                sizeof(optStabMode), &optStabMode);
3410    }
3411
3412    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3413        uint8_t noiseRedMode =
3414            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3415        rc = AddSetParmEntryToBatch(mParameters,
3416                CAM_INTF_META_NOISE_REDUCTION_MODE,
3417                sizeof(noiseRedMode), &noiseRedMode);
3418    }
3419
3420    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3421        uint8_t noiseRedStrength =
3422            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3423        rc = AddSetParmEntryToBatch(mParameters,
3424                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3425                sizeof(noiseRedStrength), &noiseRedStrength);
3426    }
3427
3428    cam_crop_region_t scalerCropRegion;
3429    bool scalerCropSet = false;
3430    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3431        scalerCropRegion.left =
3432            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3433        scalerCropRegion.top =
3434            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3435        scalerCropRegion.width =
3436            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3437        scalerCropRegion.height =
3438            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3439        rc = AddSetParmEntryToBatch(mParameters,
3440                CAM_INTF_META_SCALER_CROP_REGION,
3441                sizeof(scalerCropRegion), &scalerCropRegion);
3442        scalerCropSet = true;
3443    }
3444
3445    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3446        int64_t sensorExpTime =
3447            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3448        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3449        rc = AddSetParmEntryToBatch(mParameters,
3450                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3451                sizeof(sensorExpTime), &sensorExpTime);
3452    }
3453
3454    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3455        int64_t sensorFrameDuration =
3456            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3457        int64_t minFrameDuration = getMinFrameDuration(request);
3458        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3459        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3460            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3461        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3462        rc = AddSetParmEntryToBatch(mParameters,
3463                CAM_INTF_META_SENSOR_FRAME_DURATION,
3464                sizeof(sensorFrameDuration), &sensorFrameDuration);
3465    }
3466
3467    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3468        int32_t sensorSensitivity =
3469            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3470        if (sensorSensitivity <
3471                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3472            sensorSensitivity =
3473                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3474        if (sensorSensitivity >
3475                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3476            sensorSensitivity =
3477                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3478        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3479        rc = AddSetParmEntryToBatch(mParameters,
3480                CAM_INTF_META_SENSOR_SENSITIVITY,
3481                sizeof(sensorSensitivity), &sensorSensitivity);
3482    }
3483
3484    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3485        int32_t shadingMode =
3486            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3487        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3488                sizeof(shadingMode), &shadingMode);
3489    }
3490
3491    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3492        uint8_t shadingStrength =
3493            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3494        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3495                sizeof(shadingStrength), &shadingStrength);
3496    }
3497
3498    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3499        uint8_t fwk_facedetectMode =
3500            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3501        uint8_t facedetectMode =
3502            lookupHalName(FACEDETECT_MODES_MAP,
3503                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3504        rc = AddSetParmEntryToBatch(mParameters,
3505                CAM_INTF_META_STATS_FACEDETECT_MODE,
3506                sizeof(facedetectMode), &facedetectMode);
3507    }
3508
3509    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3510        uint8_t histogramMode =
3511            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3512        rc = AddSetParmEntryToBatch(mParameters,
3513                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3514                sizeof(histogramMode), &histogramMode);
3515    }
3516
3517    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3518        uint8_t sharpnessMapMode =
3519            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3520        rc = AddSetParmEntryToBatch(mParameters,
3521                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3522                sizeof(sharpnessMapMode), &sharpnessMapMode);
3523    }
3524
3525    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3526        uint8_t tonemapMode =
3527            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3528        rc = AddSetParmEntryToBatch(mParameters,
3529                CAM_INTF_META_TONEMAP_MODE,
3530                sizeof(tonemapMode), &tonemapMode);
3531    }
3532    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3533    /*All tonemap channels will have the same number of points*/
3534    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3535        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3536        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3537        cam_rgb_tonemap_curves tonemapCurves;
3538        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3539
3540        /* ch0 = G*/
3541        int point = 0;
3542        cam_tonemap_curve_t tonemapCurveGreen;
3543        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3544            for (int j = 0; j < 2; j++) {
3545               tonemapCurveGreen.tonemap_points[i][j] =
3546                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3547               point++;
3548            }
3549        }
3550        tonemapCurves.curves[0] = tonemapCurveGreen;
3551
3552        /* ch 1 = B */
3553        point = 0;
3554        cam_tonemap_curve_t tonemapCurveBlue;
3555        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3556            for (int j = 0; j < 2; j++) {
3557               tonemapCurveBlue.tonemap_points[i][j] =
3558                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3559               point++;
3560            }
3561        }
3562        tonemapCurves.curves[1] = tonemapCurveBlue;
3563
3564        /* ch 2 = R */
3565        point = 0;
3566        cam_tonemap_curve_t tonemapCurveRed;
3567        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3568            for (int j = 0; j < 2; j++) {
3569               tonemapCurveRed.tonemap_points[i][j] =
3570                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3571               point++;
3572            }
3573        }
3574        tonemapCurves.curves[2] = tonemapCurveRed;
3575
3576        rc = AddSetParmEntryToBatch(mParameters,
3577                CAM_INTF_META_TONEMAP_CURVES,
3578                sizeof(tonemapCurves), &tonemapCurves);
3579    }
3580
3581    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3582        uint8_t captureIntent =
3583            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3584        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3585                sizeof(captureIntent), &captureIntent);
3586    }
3587
3588    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3589        uint8_t blackLevelLock =
3590            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3591        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3592                sizeof(blackLevelLock), &blackLevelLock);
3593    }
3594
3595    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3596        uint8_t lensShadingMapMode =
3597            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3598        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3599                sizeof(lensShadingMapMode), &lensShadingMapMode);
3600    }
3601
3602    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3603        cam_area_t roi;
3604        bool reset = true;
3605        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3606        if (scalerCropSet) {
3607            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3608        }
3609        if (reset) {
3610            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3611                    sizeof(roi), &roi);
3612        }
3613    }
3614
3615    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3616        cam_area_t roi;
3617        bool reset = true;
3618        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3619        if (scalerCropSet) {
3620            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3621        }
3622        if (reset) {
3623            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3624                    sizeof(roi), &roi);
3625        }
3626    }
3627
3628    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3629        cam_area_t roi;
3630        bool reset = true;
3631        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3632        if (scalerCropSet) {
3633            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3634        }
3635        if (reset) {
3636            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3637                    sizeof(roi), &roi);
3638        }
3639    }
3640    return rc;
3641}
3642
3643/*===========================================================================
3644 * FUNCTION   : getJpegSettings
3645 *
3646 * DESCRIPTION: save the jpeg settings in the HAL
3647 *
3648 *
3649 * PARAMETERS :
3650 *   @settings  : frame settings information from framework
3651 *
3652 *
3653 * RETURN     : success: NO_ERROR
3654 *              failure:
3655 *==========================================================================*/
3656int QCamera3HardwareInterface::getJpegSettings
3657                                  (const camera_metadata_t *settings)
3658{
3659    if (mJpegSettings) {
3660        if (mJpegSettings->gps_timestamp) {
3661            free(mJpegSettings->gps_timestamp);
3662            mJpegSettings->gps_timestamp = NULL;
3663        }
3664        if (mJpegSettings->gps_coordinates) {
3665            for (int i = 0; i < 3; i++) {
3666                free(mJpegSettings->gps_coordinates[i]);
3667                mJpegSettings->gps_coordinates[i] = NULL;
3668            }
3669        }
3670        free(mJpegSettings);
3671        mJpegSettings = NULL;
3672    }
3673    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3674    CameraMetadata jpeg_settings;
3675    jpeg_settings = settings;
3676
3677    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3678        mJpegSettings->jpeg_orientation =
3679            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3680    } else {
3681        mJpegSettings->jpeg_orientation = 0;
3682    }
3683    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3684        mJpegSettings->jpeg_quality =
3685            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3686    } else {
3687        mJpegSettings->jpeg_quality = 85;
3688    }
3689    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3690        mJpegSettings->thumbnail_size.width =
3691            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3692        mJpegSettings->thumbnail_size.height =
3693            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3694    } else {
3695        mJpegSettings->thumbnail_size.width = 0;
3696        mJpegSettings->thumbnail_size.height = 0;
3697    }
3698    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3699        for (int i = 0; i < 3; i++) {
3700            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3701            *(mJpegSettings->gps_coordinates[i]) =
3702                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3703        }
3704    } else{
3705       for (int i = 0; i < 3; i++) {
3706            mJpegSettings->gps_coordinates[i] = NULL;
3707        }
3708    }
3709
3710    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3711        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3712        *(mJpegSettings->gps_timestamp) =
3713            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3714    } else {
3715        mJpegSettings->gps_timestamp = NULL;
3716    }
3717
3718    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3719        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3720        for (int i = 0; i < len; i++) {
3721            mJpegSettings->gps_processing_method[i] =
3722                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3723        }
3724        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3725            mJpegSettings->gps_processing_method[len] = '\0';
3726        }
3727    } else {
3728        mJpegSettings->gps_processing_method[0] = '\0';
3729    }
3730
3731    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3732        mJpegSettings->sensor_sensitivity =
3733            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3734    } else {
3735        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3736    }
3737
3738    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3739
3740    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3741        mJpegSettings->lens_focal_length =
3742            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3743    }
3744    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3745        mJpegSettings->exposure_compensation =
3746            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3747    }
3748    mJpegSettings->sharpness = 10; //default value
3749    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3750        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3751        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3752            mJpegSettings->sharpness = 0;
3753        }
3754    }
3755    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3756    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3757    mJpegSettings->is_jpeg_format = true;
3758    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3759    return 0;
3760}
3761
3762/*===========================================================================
3763 * FUNCTION   : captureResultCb
3764 *
3765 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3766 *
3767 * PARAMETERS :
3768 *   @frame  : frame information from mm-camera-interface
3769 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3770 *   @userdata: userdata
3771 *
3772 * RETURN     : NONE
3773 *==========================================================================*/
3774void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3775                camera3_stream_buffer_t *buffer,
3776                uint32_t frame_number, void *userdata)
3777{
3778    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3779    if (hw == NULL) {
3780        ALOGE("%s: Invalid hw %p", __func__, hw);
3781        return;
3782    }
3783
3784    hw->captureResultCb(metadata, buffer, frame_number);
3785    return;
3786}
3787
3788
3789/*===========================================================================
3790 * FUNCTION   : initialize
3791 *
3792 * DESCRIPTION: Pass framework callback pointers to HAL
3793 *
3794 * PARAMETERS :
3795 *
3796 *
3797 * RETURN     : Success : 0
3798 *              Failure: -ENODEV
3799 *==========================================================================*/
3800
3801int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3802                                  const camera3_callback_ops_t *callback_ops)
3803{
3804    ALOGV("%s: E", __func__);
3805    QCamera3HardwareInterface *hw =
3806        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3807    if (!hw) {
3808        ALOGE("%s: NULL camera device", __func__);
3809        return -ENODEV;
3810    }
3811
3812    int rc = hw->initialize(callback_ops);
3813    ALOGV("%s: X", __func__);
3814    return rc;
3815}
3816
3817/*===========================================================================
3818 * FUNCTION   : configure_streams
3819 *
3820 * DESCRIPTION:
3821 *
3822 * PARAMETERS :
3823 *
3824 *
3825 * RETURN     : Success: 0
3826 *              Failure: -EINVAL (if stream configuration is invalid)
3827 *                       -ENODEV (fatal error)
3828 *==========================================================================*/
3829
3830int QCamera3HardwareInterface::configure_streams(
3831        const struct camera3_device *device,
3832        camera3_stream_configuration_t *stream_list)
3833{
3834    ALOGV("%s: E", __func__);
3835    QCamera3HardwareInterface *hw =
3836        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3837    if (!hw) {
3838        ALOGE("%s: NULL camera device", __func__);
3839        return -ENODEV;
3840    }
3841    int rc = hw->configureStreams(stream_list);
3842    ALOGV("%s: X", __func__);
3843    return rc;
3844}
3845
3846/*===========================================================================
3847 * FUNCTION   : register_stream_buffers
3848 *
3849 * DESCRIPTION: Register stream buffers with the device
3850 *
3851 * PARAMETERS :
3852 *
3853 * RETURN     :
3854 *==========================================================================*/
3855int QCamera3HardwareInterface::register_stream_buffers(
3856        const struct camera3_device *device,
3857        const camera3_stream_buffer_set_t *buffer_set)
3858{
3859    ALOGV("%s: E", __func__);
3860    QCamera3HardwareInterface *hw =
3861        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3862    if (!hw) {
3863        ALOGE("%s: NULL camera device", __func__);
3864        return -ENODEV;
3865    }
3866    int rc = hw->registerStreamBuffers(buffer_set);
3867    ALOGV("%s: X", __func__);
3868    return rc;
3869}
3870
3871/*===========================================================================
3872 * FUNCTION   : construct_default_request_settings
3873 *
3874 * DESCRIPTION: Configure a settings buffer to meet the required use case
3875 *
3876 * PARAMETERS :
3877 *
3878 *
3879 * RETURN     : Success: Return valid metadata
3880 *              Failure: Return NULL
3881 *==========================================================================*/
3882const camera_metadata_t* QCamera3HardwareInterface::
3883    construct_default_request_settings(const struct camera3_device *device,
3884                                        int type)
3885{
3886
3887    ALOGV("%s: E", __func__);
3888    camera_metadata_t* fwk_metadata = NULL;
3889    QCamera3HardwareInterface *hw =
3890        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3891    if (!hw) {
3892        ALOGE("%s: NULL camera device", __func__);
3893        return NULL;
3894    }
3895
3896    fwk_metadata = hw->translateCapabilityToMetadata(type);
3897
3898    ALOGV("%s: X", __func__);
3899    return fwk_metadata;
3900}
3901
3902/*===========================================================================
3903 * FUNCTION   : process_capture_request
3904 *
3905 * DESCRIPTION:
3906 *
3907 * PARAMETERS :
3908 *
3909 *
3910 * RETURN     :
3911 *==========================================================================*/
3912int QCamera3HardwareInterface::process_capture_request(
3913                    const struct camera3_device *device,
3914                    camera3_capture_request_t *request)
3915{
3916    ALOGV("%s: E", __func__);
3917    QCamera3HardwareInterface *hw =
3918        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3919    if (!hw) {
3920        ALOGE("%s: NULL camera device", __func__);
3921        return -EINVAL;
3922    }
3923
3924    int rc = hw->processCaptureRequest(request);
3925    ALOGV("%s: X", __func__);
3926    return rc;
3927}
3928
3929/*===========================================================================
3930 * FUNCTION   : get_metadata_vendor_tag_ops
3931 *
3932 * DESCRIPTION:
3933 *
3934 * PARAMETERS :
3935 *
3936 *
3937 * RETURN     :
3938 *==========================================================================*/
3939
3940void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3941                const struct camera3_device *device,
3942                vendor_tag_query_ops_t* ops)
3943{
3944    ALOGV("%s: E", __func__);
3945    QCamera3HardwareInterface *hw =
3946        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3947    if (!hw) {
3948        ALOGE("%s: NULL camera device", __func__);
3949        return;
3950    }
3951
3952    hw->getMetadataVendorTagOps(ops);
3953    ALOGV("%s: X", __func__);
3954    return;
3955}
3956
3957/*===========================================================================
3958 * FUNCTION   : dump
3959 *
3960 * DESCRIPTION:
3961 *
3962 * PARAMETERS :
3963 *
3964 *
3965 * RETURN     :
3966 *==========================================================================*/
3967
3968void QCamera3HardwareInterface::dump(
3969                const struct camera3_device *device, int fd)
3970{
3971    ALOGV("%s: E", __func__);
3972    QCamera3HardwareInterface *hw =
3973        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3974    if (!hw) {
3975        ALOGE("%s: NULL camera device", __func__);
3976        return;
3977    }
3978
3979    hw->dump(fd);
3980    ALOGV("%s: X", __func__);
3981    return;
3982}
3983
3984/*===========================================================================
3985 * FUNCTION   : flush
3986 *
3987 * DESCRIPTION:
3988 *
3989 * PARAMETERS :
3990 *
3991 *
3992 * RETURN     :
3993 *==========================================================================*/
3994
3995int QCamera3HardwareInterface::flush(
3996                const struct camera3_device *device)
3997{
3998    int rc;
3999    ALOGV("%s: E", __func__);
4000    QCamera3HardwareInterface *hw =
4001        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4002    if (!hw) {
4003        ALOGE("%s: NULL camera device", __func__);
4004        return -EINVAL;
4005    }
4006
4007    rc = hw->flush();
4008    ALOGV("%s: X", __func__);
4009    return rc;
4010}
4011
4012/*===========================================================================
4013 * FUNCTION   : close_camera_device
4014 *
4015 * DESCRIPTION:
4016 *
4017 * PARAMETERS :
4018 *
4019 *
4020 * RETURN     :
4021 *==========================================================================*/
4022int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4023{
4024    ALOGV("%s: E", __func__);
4025    int ret = NO_ERROR;
4026    QCamera3HardwareInterface *hw =
4027        reinterpret_cast<QCamera3HardwareInterface *>(
4028            reinterpret_cast<camera3_device_t *>(device)->priv);
4029    if (!hw) {
4030        ALOGE("NULL camera device");
4031        return BAD_VALUE;
4032    }
4033    delete hw;
4034
4035    pthread_mutex_lock(&mCameraSessionLock);
4036    mCameraSessionActive = 0;
4037    pthread_mutex_unlock(&mCameraSessionLock);
4038    ALOGV("%s: X", __func__);
4039    return ret;
4040}
4041
4042/*===========================================================================
4043 * FUNCTION   : getWaveletDenoiseProcessPlate
4044 *
4045 * DESCRIPTION: query wavelet denoise process plate
4046 *
4047 * PARAMETERS : None
4048 *
4049 * RETURN     : WNR prcocess plate vlaue
4050 *==========================================================================*/
4051cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4052{
4053    char prop[PROPERTY_VALUE_MAX];
4054    memset(prop, 0, sizeof(prop));
4055    property_get("persist.denoise.process.plates", prop, "0");
4056    int processPlate = atoi(prop);
4057    switch(processPlate) {
4058    case 0:
4059        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4060    case 1:
4061        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4062    case 2:
4063        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4064    case 3:
4065        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4066    default:
4067        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4068    }
4069}
4070
4071/*===========================================================================
4072 * FUNCTION   : needRotationReprocess
4073 *
4074 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4075 *
4076 * PARAMETERS : none
4077 *
4078 * RETURN     : true: needed
4079 *              false: no need
4080 *==========================================================================*/
4081bool QCamera3HardwareInterface::needRotationReprocess()
4082{
4083
4084    if (!mJpegSettings->is_jpeg_format) {
4085        // RAW image, no need to reprocess
4086        return false;
4087    }
4088
4089    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4090        mJpegSettings->jpeg_orientation > 0) {
4091        // current rotation is not zero, and pp has the capability to process rotation
4092        ALOGD("%s: need do reprocess for rotation", __func__);
4093        return true;
4094    }
4095
4096    return false;
4097}
4098
4099/*===========================================================================
4100 * FUNCTION   : needReprocess
4101 *
4102 * DESCRIPTION: if reprocess in needed
4103 *
4104 * PARAMETERS : none
4105 *
4106 * RETURN     : true: needed
4107 *              false: no need
4108 *==========================================================================*/
4109bool QCamera3HardwareInterface::needReprocess()
4110{
4111    if (!mJpegSettings->is_jpeg_format) {
4112        // RAW image, no need to reprocess
4113        return false;
4114    }
4115
4116    if ((mJpegSettings->min_required_pp_mask > 0) ||
4117         isWNREnabled()) {
4118        // TODO: add for ZSL HDR later
4119        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4120        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4121        return true;
4122    }
4123    return needRotationReprocess();
4124}
4125
4126/*===========================================================================
4127 * FUNCTION   : addOnlineReprocChannel
4128 *
4129 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4130 *              coming from input channel
4131 *
4132 * PARAMETERS :
4133 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4134 *
4135 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4136 *==========================================================================*/
4137QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4138              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4139{
4140    int32_t rc = NO_ERROR;
4141    QCamera3ReprocessChannel *pChannel = NULL;
4142    if (pInputChannel == NULL) {
4143        ALOGE("%s: input channel obj is NULL", __func__);
4144        return NULL;
4145    }
4146
4147    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4148            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4149    if (NULL == pChannel) {
4150        ALOGE("%s: no mem for reprocess channel", __func__);
4151        return NULL;
4152    }
4153
4154    // Capture channel, only need snapshot and postview streams start together
4155    mm_camera_channel_attr_t attr;
4156    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4157    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4158    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4159    rc = pChannel->initialize();
4160    if (rc != NO_ERROR) {
4161        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4162        delete pChannel;
4163        return NULL;
4164    }
4165
4166    // pp feature config
4167    cam_pp_feature_config_t pp_config;
4168    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4169    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4170        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4171        pp_config.sharpness = mJpegSettings->sharpness;
4172    }
4173
4174    if (isWNREnabled()) {
4175        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4176        pp_config.denoise2d.denoise_enable = 1;
4177        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4178    }
4179    if (needRotationReprocess()) {
4180        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4181        int rotation = mJpegSettings->jpeg_orientation;
4182        if (rotation == 0) {
4183            pp_config.rotation = ROTATE_0;
4184        } else if (rotation == 90) {
4185            pp_config.rotation = ROTATE_90;
4186        } else if (rotation == 180) {
4187            pp_config.rotation = ROTATE_180;
4188        } else if (rotation == 270) {
4189            pp_config.rotation = ROTATE_270;
4190        }
4191    }
4192
4193   rc = pChannel->addReprocStreamsFromSource(pp_config,
4194                                             pInputChannel,
4195                                             mMetadataChannel);
4196
4197    if (rc != NO_ERROR) {
4198        delete pChannel;
4199        return NULL;
4200    }
4201    return pChannel;
4202}
4203
// Maximum number of unmatched frames a channel may keep queued; taken from
// the capability table's minimum post-processing buffer count.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4208
// Whether wavelet noise reduction is supported, per this camera's
// capability table.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4212
4213}; //end namespace qcamera
4214