QCamera3HWI.cpp revision e310738e63535838ed4659a55d540b75abc84c19
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
namespace qcamera {
// Shorthand: raw buffer pointer at INDEX inside a QCamera3Memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability table, indexed by camera ID. NOTE(review): assumed to
// be populated by the camera module layer before any instance of
// QCamera3HardwareInterface is constructed (the constructor dereferences it
// unchecked) -- confirm against the module init path.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Previously applied parameter buffer; read/written elsewhere in this file.
parm_buffer_t *prevSettings;
// Cached static metadata per sensor, handed to the framework once built.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes session open/close across all HAL instances; together with
// mCameraSessionActive it enforces a single active camera session
// (see openCamera(hw_device)).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;

// --- Translation tables: android.control.* metadata enum value (left) to the
// --- corresponding mm-camera CAM_* enum value (right). ---
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Note: STEADYPHOTO intentionally maps to the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Note: AF_MODE_OFF deliberately maps to the backend's FIXED focus mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> flash mode: both AE_MODE_OFF and AE_MODE_ON disable the flash;
// the AUTO_FLASH and AUTO_FLASH_REDEYE modes both select flash AUTO.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Flat list of supported JPEG thumbnail dimensions, apparently laid out as
// (width, height) pairs with a trailing (0, 0) sentinel -- confirm against
// the static-metadata builder that consumes it.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
136
// camera3_device_ops vtable handed to the framework via mCameraDevice.ops in
// the constructor. Each entry is a static member function; presumably each
// trampoline recovers the QCamera3HardwareInterface instance from
// camera3_device_t::priv (set to `this` in the constructor). Uses the GNU
// "label:" designated-initializer extension to bind entries by field name.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};
146
147
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Fills in the
 *              framework-facing camera3_device_t, stamps the capability
 *              table for this sensor, and initializes synchronization
 *              primitives. Does NOT open the camera backend (see openCamera).
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      m_pPowerModule(NULL)
{
    // Framework-visible device struct; priv carries `this` so the static
    // entry points in mCameraOps can find the instance again.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced unchecked here;
    // assumes the module layer filled it in before construction -- confirm.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1; // -1 == no request id received yet
    pthread_mutex_init(&mMutex, NULL);

    // No default request templates constructed yet; slots are filled later
    // and freed in the destructor.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // The power-hint HAL module is optional; absence is logged but non-fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
198
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears down in strict
 *              order: stop every stream channel, delete channels and free
 *              stream records, release JPEG settings, tear down the metadata
 *              channel/parameters (only if initialize() succeeded), close the
 *              backend, then free templates and sync primitives.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
           channel->stop();
    }
    // Second pass: delete each channel and release the stream_info record.
    // stream_info records are malloc'd in configureStreams, hence free().
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
            delete channel;
        free (*it);
    }

    // The picture channel is owned through its stream's priv pointer and was
    // deleted in the loop above; only the cached alias is cleared here.
    mPictureChannel = NULL;

    if (mJpegSettings != NULL) {
        free(mJpegSettings);
        mJpegSettings = NULL;
    }

    /* Clean up all channels */
    // mMetadataChannel and the parameter heap exist only after a successful
    // initialize(); mCameraInitialized tracks exactly that.
    if (mCameraInitialized) {
        mMetadataChannel->stop();
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}
253
254/*===========================================================================
255 * FUNCTION   : openCamera
256 *
257 * DESCRIPTION: open camera
258 *
259 * PARAMETERS :
260 *   @hw_device  : double ptr for camera device struct
261 *
262 * RETURN     : int32_t type of status
263 *              NO_ERROR  -- success
264 *              none-zero failure code
265 *==========================================================================*/
266int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
267{
268    int rc = 0;
269    pthread_mutex_lock(&mCameraSessionLock);
270    if (mCameraSessionActive) {
271        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
272        pthread_mutex_unlock(&mCameraSessionLock);
273        return INVALID_OPERATION;
274    }
275
276    if (mCameraOpened) {
277        *hw_device = NULL;
278        return PERMISSION_DENIED;
279    }
280
281    rc = openCamera();
282    if (rc == 0) {
283        *hw_device = &mCameraDevice.common;
284        mCameraSessionActive = 1;
285    } else
286        *hw_device = NULL;
287
288#ifdef HAS_MULTIMEDIA_HINTS
289    if (rc == 0) {
290        if (m_pPowerModule) {
291            if (m_pPowerModule->powerHint) {
292                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
293                        (void *)"state=1");
294            }
295        }
296    }
297#endif
298    pthread_mutex_unlock(&mCameraSessionLock);
299    return rc;
300}
301
302/*===========================================================================
303 * FUNCTION   : openCamera
304 *
305 * DESCRIPTION: open camera
306 *
307 * PARAMETERS : none
308 *
309 * RETURN     : int32_t type of status
310 *              NO_ERROR  -- success
311 *              none-zero failure code
312 *==========================================================================*/
313int QCamera3HardwareInterface::openCamera()
314{
315    if (mCameraHandle) {
316        ALOGE("Failure: Camera already opened");
317        return ALREADY_EXISTS;
318    }
319    mCameraHandle = camera_open(mCameraId);
320    if (!mCameraHandle) {
321        ALOGE("camera_open failed.");
322        return UNKNOWN_ERROR;
323    }
324
325    mCameraOpened = true;
326
327    return NO_ERROR;
328}
329
330/*===========================================================================
331 * FUNCTION   : closeCamera
332 *
333 * DESCRIPTION: close camera
334 *
335 * PARAMETERS : none
336 *
337 * RETURN     : int32_t type of status
338 *              NO_ERROR  -- success
339 *              none-zero failure code
340 *==========================================================================*/
341int QCamera3HardwareInterface::closeCamera()
342{
343    int rc = NO_ERROR;
344
345    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
346    mCameraHandle = NULL;
347    mCameraOpened = false;
348
349#ifdef HAS_MULTIMEDIA_HINTS
350    if (rc == NO_ERROR) {
351        if (m_pPowerModule) {
352            if (m_pPowerModule->powerHint) {
353                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
354                        (void *)"state=0");
355            }
356        }
357    }
358#endif
359
360    return rc;
361}
362
/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions. Builds the
 *              parameter heap and the metadata channel; on any failure the
 *              goto ladder unwinds in reverse order of construction.
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     : 0 on success; negative error code on failure
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
       goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    // NOTE(review): plain operator new never returns NULL (it throws or, on
    // bionic, aborts), so this branch is likely dead -- confirm build flags.
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    // From here on the destructor must tear down the metadata channel and
    // the parameter heap.
    mCameraInitialized = true;
    return 0;

// Error unwinding: reverse order of construction.
err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
416
/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
 *              and output streams. Streams absent from the new list are torn
 *              down; streams present in both lists get their channel rebuilt
 *              (status RECONFIGURE) and their buffers re-registered, since the
 *              framework only registers buffers for brand-new streams.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     : 0 on success; BAD_VALUE / -ENOMEM on failure
 *
 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    pthread_mutex_lock(&mMutex);
    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    /* first invalidate all the steams in the mStreamList
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }

    // Pass 1: reconcile the new list against mStreamInfo, and remember the
    // input stream and the BLOB (JPEG) stream for ZSL pairing below.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            // NOTE(review): malloc result is unchecked and buffer_set is left
            // uninitialized until registerStreamBuffers() fills it in; the
            // INVALID-cleanup path below reads buffer_set.buffers, so an
            // unregistered stream there would free a garbage pointer -- verify.
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    // Streams left INVALID were dropped by the framework: delete the channel,
    // the cached buffer list, and the record itself.
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    //mMetadataChannel->stop();

    /* Allocate channel objects for the requested streams */
    // Pass 2: every stream with a NULL priv (new, or RECONFIGURE after its
    // old channel was deleted above) gets gralloc usage flags and a channel.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        if (newStream->priv == NULL) {
            //New stream, construct channel
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    // A bidirectional YUV stream paired with a JPEG stream
                    // enables ZSL: the channel is sized to the JPEG dims.
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework wont*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        // Reset the per-stream pending-buffer count for the new session.
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mFirstRequest = true;

    pthread_mutex_unlock(&mMutex);
    return rc;
}
636
637/*===========================================================================
638 * FUNCTION   : validateCaptureRequest
639 *
640 * DESCRIPTION: validate a capture request from camera service
641 *
642 * PARAMETERS :
643 *   @request : request from framework to process
644 *
645 * RETURN     :
646 *
647 *==========================================================================*/
648int QCamera3HardwareInterface::validateCaptureRequest(
649                    camera3_capture_request_t *request)
650{
651    ssize_t idx = 0;
652    const camera3_stream_buffer_t *b;
653    CameraMetadata meta;
654
655    /* Sanity check the request */
656    if (request == NULL) {
657        ALOGE("%s: NULL capture request", __func__);
658        return BAD_VALUE;
659    }
660
661    uint32_t frameNumber = request->frame_number;
662    if (request->input_buffer != NULL &&
663            request->input_buffer->stream != mInputStream) {
664        ALOGE("%s: Request %d: Input buffer not from input stream!",
665                __FUNCTION__, frameNumber);
666        return BAD_VALUE;
667    }
668    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
669        ALOGE("%s: Request %d: No output buffers provided!",
670                __FUNCTION__, frameNumber);
671        return BAD_VALUE;
672    }
673    if (request->input_buffer != NULL) {
674        b = request->input_buffer;
675        QCamera3Channel *channel =
676            static_cast<QCamera3Channel*>(b->stream->priv);
677        if (channel == NULL) {
678            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
679                    __func__, frameNumber, idx);
680            return BAD_VALUE;
681        }
682        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
683            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
684                    __func__, frameNumber, idx);
685            return BAD_VALUE;
686        }
687        if (b->release_fence != -1) {
688            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
689                    __func__, frameNumber, idx);
690            return BAD_VALUE;
691        }
692        if (b->buffer == NULL) {
693            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
694                    __func__, frameNumber, idx);
695            return BAD_VALUE;
696        }
697    }
698
699    // Validate all buffers
700    b = request->output_buffers;
701    do {
702        QCamera3Channel *channel =
703                static_cast<QCamera3Channel*>(b->stream->priv);
704        if (channel == NULL) {
705            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
706                    __func__, frameNumber, idx);
707            return BAD_VALUE;
708        }
709        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
710            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
711                    __func__, frameNumber, idx);
712            return BAD_VALUE;
713        }
714        if (b->release_fence != -1) {
715            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
716                    __func__, frameNumber, idx);
717            return BAD_VALUE;
718        }
719        if (b->buffer == NULL) {
720            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
721                    __func__, frameNumber, idx);
722            return BAD_VALUE;
723        }
724        idx++;
725        b = request->output_buffers + idx;
726    } while (idx < (ssize_t)request->num_output_buffers);
727
728    return NO_ERROR;
729}
730
731/*===========================================================================
732 * FUNCTION   : registerStreamBuffers
733 *
734 * DESCRIPTION: Register buffers for a given stream with the HAL device.
735 *
736 * PARAMETERS :
737 *   @stream_list : streams to be configured
738 *
739 * RETURN     :
740 *
741 *==========================================================================*/
742int QCamera3HardwareInterface::registerStreamBuffers(
743        const camera3_stream_buffer_set_t *buffer_set)
744{
745    int rc = 0;
746
747    pthread_mutex_lock(&mMutex);
748
749    if (buffer_set == NULL) {
750        ALOGE("%s: Invalid buffer_set parameter.", __func__);
751        pthread_mutex_unlock(&mMutex);
752        return -EINVAL;
753    }
754    if (buffer_set->stream == NULL) {
755        ALOGE("%s: Invalid stream parameter.", __func__);
756        pthread_mutex_unlock(&mMutex);
757        return -EINVAL;
758    }
759    if (buffer_set->num_buffers < 1) {
760        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
761        pthread_mutex_unlock(&mMutex);
762        return -EINVAL;
763    }
764    if (buffer_set->buffers == NULL) {
765        ALOGE("%s: Invalid buffers parameter.", __func__);
766        pthread_mutex_unlock(&mMutex);
767        return -EINVAL;
768    }
769
770    camera3_stream_t *stream = buffer_set->stream;
771    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
772
773    //set the buffer_set in the mStreamInfo array
774    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
775            it != mStreamInfo.end(); it++) {
776        if ((*it)->stream == stream) {
777            uint32_t numBuffers = buffer_set->num_buffers;
778            (*it)->buffer_set.stream = buffer_set->stream;
779            (*it)->buffer_set.num_buffers = numBuffers;
780            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
781            if ((*it)->buffer_set.buffers == NULL) {
782                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
783                pthread_mutex_unlock(&mMutex);
784                return -ENOMEM;
785            }
786            for (size_t j = 0; j < numBuffers; j++){
787                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
788            }
789            (*it)->registered = 1;
790        }
791    }
792    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
793    if (rc < 0) {
794        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
795        pthread_mutex_unlock(&mMutex);
796        return -ENODEV;
797    }
798
799    pthread_mutex_unlock(&mMutex);
800    return NO_ERROR;
801}
802
803/*===========================================================================
804 * FUNCTION   : processCaptureRequest
805 *
806 * DESCRIPTION: process a capture request from camera service
807 *
808 * PARAMETERS :
809 *   @request : request from framework to process
810 *
811 * RETURN     :
812 *
813 *==========================================================================*/
814int QCamera3HardwareInterface::processCaptureRequest(
815                    camera3_capture_request_t *request)
816{
817    int rc = NO_ERROR;
818    int32_t request_id;
819    CameraMetadata meta;
820
821    pthread_mutex_lock(&mMutex);
822
823    rc = validateCaptureRequest(request);
824    if (rc != NO_ERROR) {
825        ALOGE("%s: incoming request is not valid", __func__);
826        pthread_mutex_unlock(&mMutex);
827        return rc;
828    }
829
830    uint32_t frameNumber = request->frame_number;
831    uint32_t streamTypeMask = 0;
832
833    meta = request->settings;
834    if (meta.exists(ANDROID_REQUEST_ID)) {
835        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
836        mCurrentRequestId = request_id;
837        ALOGV("%s: Received request with id: %d",__func__, request_id);
838    } else if (mFirstRequest || mCurrentRequestId == -1){
839        ALOGE("%s: Unable to find request id field, \
840                & no previous id available", __func__);
841        return NAME_NOT_FOUND;
842    } else {
843        ALOGV("%s: Re-using old request id", __func__);
844        request_id = mCurrentRequestId;
845    }
846
847    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
848                                    __func__, __LINE__,
849                                    request->num_output_buffers,
850                                    request->input_buffer,
851                                    frameNumber);
852    // Acquire all request buffers first
853    int blob_request = 0;
854    for (size_t i = 0; i < request->num_output_buffers; i++) {
855        const camera3_stream_buffer_t& output = request->output_buffers[i];
856        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
857        sp<Fence> acquireFence = new Fence(output.acquire_fence);
858
859        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
860        //Call function to store local copy of jpeg data for encode params.
861            blob_request = 1;
862            rc = getJpegSettings(request->settings);
863            if (rc < 0) {
864                ALOGE("%s: failed to get jpeg parameters", __func__);
865                pthread_mutex_unlock(&mMutex);
866                return rc;
867            }
868        }
869
870        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
871        if (rc != OK) {
872            ALOGE("%s: fence wait failed %d", __func__, rc);
873            pthread_mutex_unlock(&mMutex);
874            return rc;
875        }
876        streamTypeMask |= channel->getStreamTypeMask();
877    }
878
879    rc = setFrameParameters(request->frame_number, request->settings, streamTypeMask);
880    if (rc < 0) {
881        ALOGE("%s: fail to set frame parameters", __func__);
882        pthread_mutex_unlock(&mMutex);
883        return rc;
884    }
885
886    /* Update pending request list and pending buffers map */
887    PendingRequestInfo pendingRequest;
888    pendingRequest.frame_number = frameNumber;
889    pendingRequest.num_buffers = request->num_output_buffers;
890    pendingRequest.request_id = request_id;
891    pendingRequest.blob_request = blob_request;
892
893    for (size_t i = 0; i < request->num_output_buffers; i++) {
894        RequestedBufferInfo requestedBuf;
895        requestedBuf.stream = request->output_buffers[i].stream;
896        requestedBuf.buffer = NULL;
897        pendingRequest.buffers.push_back(requestedBuf);
898
899        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
900    }
901    mPendingRequestsList.push_back(pendingRequest);
902
903    // Notify metadata channel we receive a request
904    mMetadataChannel->request(NULL, frameNumber);
905
906    // Call request on other streams
907    for (size_t i = 0; i < request->num_output_buffers; i++) {
908        const camera3_stream_buffer_t& output = request->output_buffers[i];
909        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
910        mm_camera_buf_def_t *pInputBuffer = NULL;
911
912        if (channel == NULL) {
913            ALOGE("%s: invalid channel pointer for stream", __func__);
914            continue;
915        }
916
917        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
918            QCamera3RegularChannel* inputChannel = NULL;
919            if(request->input_buffer != NULL){
920
921                //Try to get the internal format
922                inputChannel = (QCamera3RegularChannel*)
923                    request->input_buffer->stream->priv;
924                if(inputChannel == NULL ){
925                    ALOGE("%s: failed to get input channel handle", __func__);
926                } else {
927                    pInputBuffer =
928                        inputChannel->getInternalFormatBuffer(
929                                request->input_buffer->buffer);
930                    ALOGD("%s: Input buffer dump",__func__);
931                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
932                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
933                    ALOGD("frame len:%d", pInputBuffer->frame_len);
934                }
935            }
936            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
937                            pInputBuffer,(QCamera3Channel*)inputChannel);
938        } else {
939            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
940                __LINE__, output.buffer, frameNumber);
941            rc = channel->request(output.buffer, frameNumber);
942        }
943        if (rc < 0)
944            ALOGE("%s: request failed", __func__);
945    }
946
947    mFirstRequest = false;
948
949    //Block on conditional variable
950    mPendingRequest = 1;
951    while (mPendingRequest == 1) {
952        pthread_cond_wait(&mRequestCond, &mMutex);
953    }
954
955    pthread_mutex_unlock(&mMutex);
956    return rc;
957}
958
959/*===========================================================================
960 * FUNCTION   : getMetadataVendorTagOps
961 *
962 * DESCRIPTION:
963 *
964 * PARAMETERS :
965 *
966 *
967 * RETURN     :
968 *==========================================================================*/
969void QCamera3HardwareInterface::getMetadataVendorTagOps(
970                    vendor_tag_query_ops_t* /*ops*/)
971{
972    /* Enable locks when we eventually add Vendor Tags */
973    /*
974    pthread_mutex_lock(&mMutex);
975
976    pthread_mutex_unlock(&mMutex);
977    */
978    return;
979}
980
981/*===========================================================================
982 * FUNCTION   : dump
983 *
984 * DESCRIPTION:
985 *
986 * PARAMETERS :
987 *
988 *
989 * RETURN     :
990 *==========================================================================*/
991void QCamera3HardwareInterface::dump(int /*fd*/)
992{
993    /*Enable lock when we implement this function*/
994    /*
995    pthread_mutex_lock(&mMutex);
996
997    pthread_mutex_unlock(&mMutex);
998    */
999    return;
1000}
1001
1002
1003/*===========================================================================
1004 * FUNCTION   : captureResultCb
1005 *
1006 * DESCRIPTION: Callback handler for all capture result
1007 *              (streams, as well as metadata)
1008 *
1009 * PARAMETERS :
1010 *   @metadata : metadata information
1011 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1012 *               NULL if metadata.
1013 *
1014 * RETURN     : NONE
1015 *==========================================================================*/
1016void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1017                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1018{
1019    pthread_mutex_lock(&mMutex);
1020
1021    if (metadata_buf) {
1022        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1023        int32_t frame_number_valid = *(int32_t *)
1024            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1025        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1026            CAM_INTF_META_PENDING_REQUESTS, metadata);
1027        uint32_t frame_number = *(uint32_t *)
1028            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1029        const struct timeval *tv = (const struct timeval *)
1030            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1031        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1032            tv->tv_usec * NSEC_PER_USEC;
1033
1034        if (!frame_number_valid) {
1035            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1036            mMetadataChannel->bufDone(metadata_buf);
1037            goto done_metadata;
1038        }
1039        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1040                frame_number, capture_time);
1041
1042        // Go through the pending requests info and send shutter/results to frameworks
1043        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1044                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1045            camera3_capture_result_t result;
1046            camera3_notify_msg_t notify_msg;
1047            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1048
1049            // Flush out all entries with less or equal frame numbers.
1050
1051            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1052            //Right now it's the same as metadata timestamp
1053
1054            //TODO: When there is metadata drop, how do we derive the timestamp of
1055            //dropped frames? For now, we fake the dropped timestamp by substracting
1056            //from the reported timestamp
1057            nsecs_t current_capture_time = capture_time -
1058                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1059
1060            // Send shutter notify to frameworks
1061            notify_msg.type = CAMERA3_MSG_SHUTTER;
1062            notify_msg.message.shutter.frame_number = i->frame_number;
1063            notify_msg.message.shutter.timestamp = current_capture_time;
1064            mCallbackOps->notify(mCallbackOps, &notify_msg);
1065            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1066                    i->frame_number, capture_time);
1067
1068            // Send empty metadata with already filled buffers for dropped metadata
1069            // and send valid metadata with already filled buffers for current metadata
1070            if (i->frame_number < frame_number) {
1071                CameraMetadata dummyMetadata;
1072                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1073                        &current_capture_time, 1);
1074                dummyMetadata.update(ANDROID_REQUEST_ID,
1075                        &(i->request_id), 1);
1076                result.result = dummyMetadata.release();
1077            } else {
1078                result.result = translateCbMetadataToResultMetadata(metadata,
1079                        current_capture_time, i->request_id);
1080                if (i->blob_request && needReprocess()) {
1081                   //If it is a blob request then send the metadata to the picture channel
1082                   mPictureChannel->queueMetadata(metadata_buf);
1083
1084                } else {
1085                   // Return metadata buffer
1086                   mMetadataChannel->bufDone(metadata_buf);
1087                   free(metadata_buf);
1088                }
1089            }
1090            if (!result.result) {
1091                ALOGE("%s: metadata is NULL", __func__);
1092            }
1093            result.frame_number = i->frame_number;
1094            result.num_output_buffers = 0;
1095            result.output_buffers = NULL;
1096            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1097                    j != i->buffers.end(); j++) {
1098                if (j->buffer) {
1099                    result.num_output_buffers++;
1100                }
1101            }
1102
1103            if (result.num_output_buffers > 0) {
1104                camera3_stream_buffer_t *result_buffers =
1105                    new camera3_stream_buffer_t[result.num_output_buffers];
1106                if (!result_buffers) {
1107                    ALOGE("%s: Fatal error: out of memory", __func__);
1108                }
1109                size_t result_buffers_idx = 0;
1110                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1111                        j != i->buffers.end(); j++) {
1112                    if (j->buffer) {
1113                        result_buffers[result_buffers_idx++] = *(j->buffer);
1114                        free(j->buffer);
1115                        j->buffer = NULL;
1116                        mPendingBuffersMap.editValueFor(j->stream)--;
1117                    }
1118                }
1119                result.output_buffers = result_buffers;
1120
1121                mCallbackOps->process_capture_result(mCallbackOps, &result);
1122                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1123                        __func__, result.frame_number, current_capture_time);
1124                free_camera_metadata((camera_metadata_t *)result.result);
1125                delete[] result_buffers;
1126            } else {
1127                mCallbackOps->process_capture_result(mCallbackOps, &result);
1128                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1129                        __func__, result.frame_number, current_capture_time);
1130                free_camera_metadata((camera_metadata_t *)result.result);
1131            }
1132            // erase the element from the list
1133            i = mPendingRequestsList.erase(i);
1134        }
1135
1136
1137done_metadata:
1138        bool max_buffers_dequeued = false;
1139        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1140            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1141            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1142            if (queued_buffers == stream->max_buffers) {
1143                max_buffers_dequeued = true;
1144                break;
1145            }
1146        }
1147        if (!max_buffers_dequeued && !pending_requests) {
1148            // Unblock process_capture_request
1149            mPendingRequest = 0;
1150            pthread_cond_signal(&mRequestCond);
1151        }
1152    } else {
1153        // If the frame number doesn't exist in the pending request list,
1154        // directly send the buffer to the frameworks, and update pending buffers map
1155        // Otherwise, book-keep the buffer.
1156        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1157        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1158            i++;
1159        }
1160        if (i == mPendingRequestsList.end()) {
1161            // Verify all pending requests frame_numbers are greater
1162            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1163                    j != mPendingRequestsList.end(); j++) {
1164                if (j->frame_number < frame_number) {
1165                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1166                            __func__, j->frame_number, frame_number);
1167                }
1168            }
1169            camera3_capture_result_t result;
1170            result.result = NULL;
1171            result.frame_number = frame_number;
1172            result.num_output_buffers = 1;
1173            result.output_buffers = buffer;
1174            ALOGV("%s: result frame_number = %d, buffer = %p",
1175                    __func__, frame_number, buffer);
1176            mPendingBuffersMap.editValueFor(buffer->stream)--;
1177            mCallbackOps->process_capture_result(mCallbackOps, &result);
1178        } else {
1179            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1180                    j != i->buffers.end(); j++) {
1181                if (j->stream == buffer->stream) {
1182                    if (j->buffer != NULL) {
1183                        ALOGE("%s: Error: buffer is already set", __func__);
1184                    } else {
1185                        j->buffer = (camera3_stream_buffer_t *)malloc(
1186                                sizeof(camera3_stream_buffer_t));
1187                        *(j->buffer) = *buffer;
1188                        ALOGV("%s: cache buffer %p at result frame_number %d",
1189                                __func__, buffer, frame_number);
1190                    }
1191                }
1192            }
1193        }
1194    }
1195    pthread_mutex_unlock(&mMutex);
1196    return;
1197}
1198
1199/*===========================================================================
1200 * FUNCTION   : translateCbMetadataToResultMetadata
1201 *
1202 * DESCRIPTION:
1203 *
1204 * PARAMETERS :
1205 *   @metadata : metadata information from callback
1206 *
1207 * RETURN     : camera_metadata_t*
1208 *              metadata in a format specified by fwk
1209 *==========================================================================*/
1210camera_metadata_t*
1211QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1212                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1213                                 int32_t request_id)
1214{
1215    CameraMetadata camMetadata;
1216    camera_metadata_t* resultMetadata;
1217
1218    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1219    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1220
1221    /*CAM_INTF_META_HISTOGRAM - TODO*/
1222    /*cam_hist_stats_t  *histogram =
1223      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1224      metadata);*/
1225
1226    /*face detection*/
1227    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1228        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1229    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1230    int32_t faceIds[numFaces];
1231    uint8_t faceScores[numFaces];
1232    int32_t faceRectangles[numFaces * 4];
1233    int32_t faceLandmarks[numFaces * 6];
1234    int j = 0, k = 0;
1235    for (int i = 0; i < numFaces; i++) {
1236        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1237        faceScores[i] = faceDetectionInfo->faces[i].score;
1238        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1239                faceRectangles+j, -1);
1240        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1241        j+= 4;
1242        k+= 6;
1243    }
1244    if (numFaces > 0) {
1245        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1246        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1247        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1248            faceRectangles, numFaces*4);
1249        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1250            faceLandmarks, numFaces*6);
1251    }
1252
1253    uint8_t  *color_correct_mode =
1254        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1255    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1256
1257    int32_t  *ae_precapture_id =
1258        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1259    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1260
1261    /*aec regions*/
1262    cam_area_t  *hAeRegions =
1263        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1264    int32_t aeRegions[5];
1265    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1266    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1267    if(mIsZslMode) {
1268        uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
1269        camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
1270    } else {
1271        uint8_t *ae_state =
1272            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1273        camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1274    }
1275    uint8_t  *focusMode =
1276        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1277    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1278
1279    /*af regions*/
1280    cam_area_t  *hAfRegions =
1281        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1282    int32_t afRegions[5];
1283    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1284    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1285
1286    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1287    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1288
1289    int32_t  *afTriggerId =
1290        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1291    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1292
1293    uint8_t  *whiteBalance =
1294        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1295    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1296
1297    /*awb regions*/
1298    cam_area_t  *hAwbRegions =
1299        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1300    int32_t awbRegions[5];
1301    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1302    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1303
1304    uint8_t  *whiteBalanceState =
1305        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1306    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1307
1308    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1309    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1310
1311    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1312    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1313
1314    uint8_t  *flashPower =
1315        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1316    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1317
1318    int64_t  *flashFiringTime =
1319        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1320    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1321
1322    /*int32_t  *ledMode =
1323      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1324      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1325
1326    uint8_t  *flashState =
1327        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1328    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1329
1330    uint8_t  *hotPixelMode =
1331        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1332    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1333
1334    float  *lensAperture =
1335        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1336    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1337
1338    float  *filterDensity =
1339        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1340    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1341
1342    float  *focalLength =
1343        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1344    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1345
1346    float  *focusDistance =
1347        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1348    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1349
1350    float  *focusRange =
1351        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1352    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1353
1354    uint8_t  *opticalStab =
1355        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1356    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1357
1358    /*int32_t  *focusState =
1359      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1360      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1361
1362    uint8_t  *noiseRedMode =
1363        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1364    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1365
1366    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1367
1368    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1369        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1370    int32_t scalerCropRegion[4];
1371    scalerCropRegion[0] = hScalerCropRegion->left;
1372    scalerCropRegion[1] = hScalerCropRegion->top;
1373    scalerCropRegion[2] = hScalerCropRegion->width;
1374    scalerCropRegion[3] = hScalerCropRegion->height;
1375    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1376
1377    int64_t  *sensorExpTime =
1378        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1379    mMetadataResponse.exposure_time = *sensorExpTime;
1380    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1381
1382    int64_t  *sensorFameDuration =
1383        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1384    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1385
1386    int32_t  *sensorSensitivity =
1387        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1388    mMetadataResponse.iso_speed = *sensorSensitivity;
1389    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1390
1391    uint8_t  *shadingMode =
1392        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1393    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1394
1395    uint8_t  *faceDetectMode =
1396        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1397    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1398        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1399        *faceDetectMode);
1400    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1401
1402    uint8_t  *histogramMode =
1403        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1404    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1405
1406    uint8_t  *sharpnessMapMode =
1407        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1408    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1409            sharpnessMapMode, 1);
1410
1411    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1412    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1413        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1414    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1415            (int32_t*)sharpnessMap->sharpness,
1416            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1417
1418    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1419        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1420    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1421    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1422    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1423                       (float*)lensShadingMap->lens_shading,
1424                       4*map_width*map_height);
1425
1426    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1427        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1428    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1429
1430    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1431        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1432    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1433                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1434
1435    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1436        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1437    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1438                       predColorCorrectionGains->gains, 4);
1439
1440    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1441        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1442    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1443                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1444
1445    uint8_t *blackLevelLock = (uint8_t*)
1446        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1447    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1448
1449    uint8_t *sceneFlicker = (uint8_t*)
1450        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1451    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1452
1453
1454    resultMetadata = camMetadata.release();
1455    return resultMetadata;
1456}
1457
1458/*===========================================================================
1459 * FUNCTION   : convertToRegions
1460 *
1461 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1462 *
1463 * PARAMETERS :
1464 *   @rect   : cam_rect_t struct to convert
1465 *   @region : int32_t destination array
1466 *   @weight : if we are converting from cam_area_t, weight is valid
1467 *             else weight = -1
1468 *
1469 *==========================================================================*/
1470void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1471    region[0] = rect.left;
1472    region[1] = rect.top;
1473    region[2] = rect.left + rect.width;
1474    region[3] = rect.top + rect.height;
1475    if (weight > -1) {
1476        region[4] = weight;
1477    }
1478}
1479
1480/*===========================================================================
1481 * FUNCTION   : convertFromRegions
1482 *
1483 * DESCRIPTION: helper method to convert from array to cam_rect_t
1484 *
1485 * PARAMETERS :
1486 *   @rect   : cam_rect_t struct to convert
1487 *   @region : int32_t destination array
1488 *   @weight : if we are converting from cam_area_t, weight is valid
1489 *             else weight = -1
1490 *
1491 *==========================================================================*/
1492void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1493                                                   const camera_metadata_t *settings,
1494                                                   uint32_t tag){
1495    CameraMetadata frame_settings;
1496    frame_settings = settings;
1497    int32_t x_min = frame_settings.find(tag).data.i32[0];
1498    int32_t y_min = frame_settings.find(tag).data.i32[1];
1499    int32_t x_max = frame_settings.find(tag).data.i32[2];
1500    int32_t y_max = frame_settings.find(tag).data.i32[3];
1501    roi->weight = frame_settings.find(tag).data.i32[4];
1502    roi->rect.left = x_min;
1503    roi->rect.top = y_min;
1504    roi->rect.width = x_max - x_min;
1505    roi->rect.height = y_max - y_min;
1506}
1507
1508/*===========================================================================
1509 * FUNCTION   : resetIfNeededROI
1510 *
1511 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1512 *              crop region
1513 *
1514 * PARAMETERS :
1515 *   @roi       : cam_area_t struct to resize
1516 *   @scalerCropRegion : cam_crop_region_t region to compare against
1517 *
1518 *
1519 *==========================================================================*/
1520bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1521                                                 const cam_crop_region_t* scalerCropRegion)
1522{
1523    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1524    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1525    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1526    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1527    if ((roi_x_max < scalerCropRegion->left) ||
1528        (roi_y_max < scalerCropRegion->top)  ||
1529        (roi->rect.left > crop_x_max) ||
1530        (roi->rect.top > crop_y_max)){
1531        return false;
1532    }
1533    if (roi->rect.left < scalerCropRegion->left) {
1534        roi->rect.left = scalerCropRegion->left;
1535    }
1536    if (roi->rect.top < scalerCropRegion->top) {
1537        roi->rect.top = scalerCropRegion->top;
1538    }
1539    if (roi_x_max > crop_x_max) {
1540        roi_x_max = crop_x_max;
1541    }
1542    if (roi_y_max > crop_y_max) {
1543        roi_y_max = crop_y_max;
1544    }
1545    roi->rect.width = roi_x_max - roi->rect.left;
1546    roi->rect.height = roi_y_max - roi->rect.top;
1547    return true;
1548}
1549
1550/*===========================================================================
1551 * FUNCTION   : convertLandmarks
1552 *
1553 * DESCRIPTION: helper method to extract the landmarks from face detection info
1554 *
1555 * PARAMETERS :
1556 *   @face   : cam_rect_t struct to convert
1557 *   @landmarks : int32_t destination array
1558 *
1559 *
1560 *==========================================================================*/
1561void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1562{
1563    landmarks[0] = face.left_eye_center.x;
1564    landmarks[1] = face.left_eye_center.y;
1565    landmarks[2] = face.right_eye_center.y;
1566    landmarks[3] = face.right_eye_center.y;
1567    landmarks[4] = face.mouth_center.x;
1568    landmarks[5] = face.mouth_center.y;
1569}
1570
1571#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1572/*===========================================================================
1573 * FUNCTION   : initCapabilities
1574 *
1575 * DESCRIPTION: initialize camera capabilities in static data struct
1576 *
1577 * PARAMETERS :
1578 *   @cameraId  : camera Id
1579 *
1580 * RETURN     : int32_t type of status
1581 *              NO_ERROR  -- success
1582 *              none-zero failure code
1583 *==========================================================================*/
1584int QCamera3HardwareInterface::initCapabilities(int cameraId)
1585{
1586    int rc = 0;
1587    mm_camera_vtbl_t *cameraHandle = NULL;
1588    QCamera3HeapMemory *capabilityHeap = NULL;
1589
1590    cameraHandle = camera_open(cameraId);
1591    if (!cameraHandle) {
1592        ALOGE("%s: camera_open failed", __func__);
1593        rc = -1;
1594        goto open_failed;
1595    }
1596
1597    capabilityHeap = new QCamera3HeapMemory();
1598    if (capabilityHeap == NULL) {
1599        ALOGE("%s: creation of capabilityHeap failed", __func__);
1600        goto heap_creation_failed;
1601    }
1602    /* Allocate memory for capability buffer */
1603    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1604    if(rc != OK) {
1605        ALOGE("%s: No memory for cappability", __func__);
1606        goto allocate_failed;
1607    }
1608
1609    /* Map memory for capability buffer */
1610    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1611    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1612                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1613                                capabilityHeap->getFd(0),
1614                                sizeof(cam_capability_t));
1615    if(rc < 0) {
1616        ALOGE("%s: failed to map capability buffer", __func__);
1617        goto map_failed;
1618    }
1619
1620    /* Query Capability */
1621    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1622    if(rc < 0) {
1623        ALOGE("%s: failed to query capability",__func__);
1624        goto query_failed;
1625    }
1626    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1627    if (!gCamCapability[cameraId]) {
1628        ALOGE("%s: out of memory", __func__);
1629        goto query_failed;
1630    }
1631    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1632                                        sizeof(cam_capability_t));
1633    rc = 0;
1634
1635query_failed:
1636    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1637                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1638map_failed:
1639    capabilityHeap->deallocate();
1640allocate_failed:
1641    delete capabilityHeap;
1642heap_creation_failed:
1643    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1644    cameraHandle = NULL;
1645open_failed:
1646    return rc;
1647}
1648
1649/*===========================================================================
1650 * FUNCTION   : initParameters
1651 *
1652 * DESCRIPTION: initialize camera parameters
1653 *
1654 * PARAMETERS :
1655 *
1656 * RETURN     : int32_t type of status
1657 *              NO_ERROR  -- success
1658 *              none-zero failure code
1659 *==========================================================================*/
1660int QCamera3HardwareInterface::initParameters()
1661{
1662    int rc = 0;
1663
1664    //Allocate Set Param Buffer
1665    mParamHeap = new QCamera3HeapMemory();
1666    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1667    if(rc != OK) {
1668        rc = NO_MEMORY;
1669        ALOGE("Failed to allocate SETPARM Heap memory");
1670        delete mParamHeap;
1671        mParamHeap = NULL;
1672        return rc;
1673    }
1674
1675    //Map memory for parameters buffer
1676    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1677            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1678            mParamHeap->getFd(0),
1679            sizeof(parm_buffer_t));
1680    if(rc < 0) {
1681        ALOGE("%s:failed to map SETPARM buffer",__func__);
1682        rc = FAILED_TRANSACTION;
1683        mParamHeap->deallocate();
1684        delete mParamHeap;
1685        mParamHeap = NULL;
1686        return rc;
1687    }
1688
1689    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1690    return rc;
1691}
1692
/*===========================================================================
 * FUNCTION   : deinitParameters
 *
 * DESCRIPTION: de-initialize camera parameters
 *
 * PARAMETERS :
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the daemon first, so the daemon is not
    // left holding a mapping onto memory we are about to free.
    // NOTE(review): assumes mParamHeap/mCameraHandle are non-NULL, i.e. that
    // initParameters succeeded — confirm callers uphold this.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap's storage, so it is dangling once the
    // heap is released; clear it.
    mParameters = NULL;
}
1713
1714/*===========================================================================
1715 * FUNCTION   : calcMaxJpegSize
1716 *
1717 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1718 *
1719 * PARAMETERS :
1720 *
1721 * RETURN     : max_jpeg_size
1722 *==========================================================================*/
1723int QCamera3HardwareInterface::calcMaxJpegSize()
1724{
1725    int32_t max_jpeg_size = 0;
1726    int temp_width, temp_height;
1727    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1728        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1729        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1730        if (temp_width * temp_height > max_jpeg_size ) {
1731            max_jpeg_size = temp_width * temp_height;
1732        }
1733    }
1734    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1735    return max_jpeg_size;
1736}
1737
/*===========================================================================
 * FUNCTION   : initStaticMetadata
 *
 * DESCRIPTION: initialize the static metadata
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *
 * RETURN     : int32_t type of status
 *              0  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    int rc = 0;
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1); */

    /*hard coded for now but this should come from sensor*/
    // 10 diopters for the back camera, fixed-focus (0) for the front.
    float min_focus_distance;
    if(facingBack){
        min_focus_distance = 10;
    } else {
        min_focus_distance = 0;
    }
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &min_focus_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      &gCamCapability[cameraId]->focal_length,
                      1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                      gCamCapability[cameraId]->optical_stab_modes_count);

    staticInfo.update(ANDROID_LENS_POSITION,
                      gCamCapability[cameraId]->lens_position,
                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));

    // Map sizes are reported as {width, height} int32 pairs.
    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
            geo_correction_map_size,
            sizeof(geo_correction_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
                       gCamCapability[cameraId]->geo_correction_map,
                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));

    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);


    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                               gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, 2);

    // Active array rect: {left, top, width, height}, origin pinned to (0,0).
    int32_t active_array_size[] = {0, 0,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                                gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);


    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
                      &gCamCapability[cameraId]->raw_min_duration,
                       1);

    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
                                                HAL_PIXEL_FORMAT_BLOB};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    // Flatten the {w,h} picture-size table into a w0,h0,w1,h1,... int32 list.
    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /*HAL 1 and HAL 3 common*/
    // NOTE(review): max zoom is hard-coded rather than read from
    // capabilities — confirm 4x matches the ISP's actual limit.
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    int32_t max3aRegions = 1;
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            &max3aRegions, 1);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
                                       gCamCapability[cameraId]->raw_dim.height};
    // NOTE(review): divisor is sizeof(uint32_t) while the array is int32_t;
    // same size on all supported ABIs, but int32_t would be consistent.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
                      raw_size,
                      sizeof(raw_size)/sizeof(uint32_t));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                                        gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    // JPEG sizes reuse the processed-size table built above.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    // NOTE(review): this max-jpeg computation duplicates calcMaxJpegSize();
    // consider sharing one implementation so the two cannot drift apart.
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // Translate each HAL effect enum to its framework value, dropping ones
    // the framework does not know about (lookupFwkName == NAME_NOT_FOUND).
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    int32_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP,
                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // Scene modes: also remember the HAL-table index of each supported mode
    // so the overrides list below can be built from the same subset.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // Three override entries (AE, AWB, AF) per supported scene mode.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                 gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        // NOTE(review): lookupFwkName's int return is narrowed into int8_t
        // here (other loops use int) — confirm framework AWB values fit.
        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                    gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);


    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // NOTE(review): avail_ae_modes has room for 5 entries; if the sensor
    // reports more than 2 AE modes and flash is present, the three flash
    // modes appended below would overflow — confirm the daemon's bound.
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // {raw, processed, jpeg} maximum concurrent output stream counts.
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    // Hand ownership of the buffer to the global static-metadata table.
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}
2110
2111/*===========================================================================
2112 * FUNCTION   : makeTable
2113 *
2114 * DESCRIPTION: make a table of sizes
2115 *
2116 * PARAMETERS :
2117 *
2118 *
2119 *==========================================================================*/
2120void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2121                                          int32_t* sizeTable)
2122{
2123    int j = 0;
2124    for (int i = 0; i < size; i++) {
2125        sizeTable[j] = dimTable[i].width;
2126        sizeTable[j+1] = dimTable[i].height;
2127        j+=2;
2128    }
2129}
2130
2131/*===========================================================================
2132 * FUNCTION   : makeFPSTable
2133 *
2134 * DESCRIPTION: make a table of fps ranges
2135 *
2136 * PARAMETERS :
2137 *
2138 *==========================================================================*/
2139void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2140                                          int32_t* fpsRangesTable)
2141{
2142    int j = 0;
2143    for (int i = 0; i < size; i++) {
2144        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2145        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2146        j+=2;
2147    }
2148}
2149
2150/*===========================================================================
2151 * FUNCTION   : makeOverridesList
2152 *
2153 * DESCRIPTION: make a list of scene mode overrides
2154 *
2155 * PARAMETERS :
2156 *
2157 *
2158 *==========================================================================*/
2159void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2160                                                  uint8_t size, uint8_t* overridesList,
2161                                                  uint8_t* supported_indexes,
2162                                                  int camera_id)
2163{
2164    /*daemon will give a list of overrides for all scene modes.
2165      However we should send the fwk only the overrides for the scene modes
2166      supported by the framework*/
2167    int j = 0, index = 0, supt = 0;
2168    uint8_t focus_override;
2169    for (int i = 0; i < size; i++) {
2170        supt = 0;
2171        index = supported_indexes[i];
2172        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2173        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2174                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2175                                                    overridesTable[index].awb_mode);
2176        focus_override = (uint8_t)overridesTable[index].af_mode;
2177        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2178           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2179              supt = 1;
2180              break;
2181           }
2182        }
2183        if (supt) {
2184           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2185                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2186                                              focus_override);
2187        } else {
2188           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2189        }
2190        j+=3;
2191    }
2192}
2193
2194/*===========================================================================
2195 * FUNCTION   : getPreviewHalPixelFormat
2196 *
2197 * DESCRIPTION: convert the format to type recognized by framework
2198 *
2199 * PARAMETERS : format : the format from backend
2200 *
2201 ** RETURN    : format recognized by framework
2202 *
2203 *==========================================================================*/
2204int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2205{
2206    int32_t halPixelFormat;
2207
2208    switch (format) {
2209    case CAM_FORMAT_YUV_420_NV12:
2210        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2211        break;
2212    case CAM_FORMAT_YUV_420_NV21:
2213        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2214        break;
2215    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2216        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2217        break;
2218    case CAM_FORMAT_YUV_420_YV12:
2219        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2220        break;
2221    case CAM_FORMAT_YUV_422_NV16:
2222    case CAM_FORMAT_YUV_422_NV61:
2223    default:
2224        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2225        break;
2226    }
2227    return halPixelFormat;
2228}
2229
2230/*===========================================================================
2231 * FUNCTION   : getSensorSensitivity
2232 *
2233 * DESCRIPTION: convert iso_mode to an integer value
2234 *
2235 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2236 *
2237 ** RETURN    : sensitivity supported by sensor
2238 *
2239 *==========================================================================*/
2240int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2241{
2242    int32_t sensitivity;
2243
2244    switch (iso_mode) {
2245    case CAM_ISO_MODE_100:
2246        sensitivity = 100;
2247        break;
2248    case CAM_ISO_MODE_200:
2249        sensitivity = 200;
2250        break;
2251    case CAM_ISO_MODE_400:
2252        sensitivity = 400;
2253        break;
2254    case CAM_ISO_MODE_800:
2255        sensitivity = 800;
2256        break;
2257    case CAM_ISO_MODE_1600:
2258        sensitivity = 1600;
2259        break;
2260    default:
2261        sensitivity = -1;
2262        break;
2263    }
2264    return sensitivity;
2265}
2266
2267
/*===========================================================================
 * FUNCTION   : AddSetParmEntryToBatch
 *
 * DESCRIPTION: add a set-parameter entry into the batch buffer. The batch
 *              is a table of fixed-size slots indexed by parameter type;
 *              occupied slots are chained in ascending type order through
 *              "next" links (GET/SET_NEXT_PARAM_ID macros) starting at
 *              GET_FIRST_PARAM_ID.
 *
 * PARAMETERS :
 *   @p_table     : ptr to parameter buffer
 *   @paramType   : parameter type (also the slot index into the table)
 *   @paramLength : length of parameter value, in bytes
 *   @paramValue  : ptr to parameter value to copy into the slot
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              BAD_VALUE -- paramLength larger than a slot (parm_type_t)
 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    int position = paramType;
    int current, next;

    /*************************************************************************
    *        Link the new slot into the sorted "next" chain                  *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING: the slot is already the head of the chain
    } else if (position < current){
        // New slot sorts before the current head: chain the old head
        // after it and make it the new first entry.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Reject values larger than a slot; copying would overrun the entry.
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2324
2325/*===========================================================================
2326 * FUNCTION   : lookupFwkName
2327 *
2328 * DESCRIPTION: In case the enum is not same in fwk and backend
2329 *              make sure the parameter is correctly propogated
2330 *
2331 * PARAMETERS  :
2332 *   @arr      : map between the two enums
2333 *   @len      : len of the map
2334 *   @hal_name : name of the hal_parm to map
2335 *
2336 * RETURN     : int type of status
2337 *              fwk_name  -- success
2338 *              none-zero failure code
2339 *==========================================================================*/
2340int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2341                                             int len, int hal_name)
2342{
2343
2344    for (int i = 0; i < len; i++) {
2345        if (arr[i].hal_name == hal_name)
2346            return arr[i].fwk_name;
2347    }
2348
2349    /* Not able to find matching framework type is not necessarily
2350     * an error case. This happens when mm-camera supports more attributes
2351     * than the frameworks do */
2352    ALOGD("%s: Cannot find matching framework type", __func__);
2353    return NAME_NOT_FOUND;
2354}
2355
2356/*===========================================================================
2357 * FUNCTION   : lookupHalName
2358 *
2359 * DESCRIPTION: In case the enum is not same in fwk and backend
2360 *              make sure the parameter is correctly propogated
2361 *
2362 * PARAMETERS  :
2363 *   @arr      : map between the two enums
2364 *   @len      : len of the map
2365 *   @fwk_name : name of the hal_parm to map
2366 *
2367 * RETURN     : int32_t type of status
2368 *              hal_name  -- success
2369 *              none-zero failure code
2370 *==========================================================================*/
2371int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2372                                             int len, int fwk_name)
2373{
2374    for (int i = 0; i < len; i++) {
2375       if (arr[i].fwk_name == fwk_name)
2376           return arr[i].hal_name;
2377    }
2378    ALOGE("%s: Cannot find matching hal type", __func__);
2379    return NAME_NOT_FOUND;
2380}
2381
2382/*===========================================================================
2383 * FUNCTION   : getCapabilities
2384 *
2385 * DESCRIPTION: query camera capabilities
2386 *
2387 * PARAMETERS :
2388 *   @cameraId  : camera Id
2389 *   @info      : camera info struct to be filled in with camera capabilities
2390 *
2391 * RETURN     : int32_t type of status
2392 *              NO_ERROR  -- success
2393 *              none-zero failure code
2394 *==========================================================================*/
2395int QCamera3HardwareInterface::getCamInfo(int cameraId,
2396                                    struct camera_info *info)
2397{
2398    int rc = 0;
2399
2400    if (NULL == gCamCapability[cameraId]) {
2401        rc = initCapabilities(cameraId);
2402        if (rc < 0) {
2403            //pthread_mutex_unlock(&g_camlock);
2404            return rc;
2405        }
2406    }
2407
2408    if (NULL == gStaticMetadata[cameraId]) {
2409        rc = initStaticMetadata(cameraId);
2410        if (rc < 0) {
2411            return rc;
2412        }
2413    }
2414
2415    switch(gCamCapability[cameraId]->position) {
2416    case CAM_POSITION_BACK:
2417        info->facing = CAMERA_FACING_BACK;
2418        break;
2419
2420    case CAM_POSITION_FRONT:
2421        info->facing = CAMERA_FACING_FRONT;
2422        break;
2423
2424    default:
2425        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2426        rc = -1;
2427        break;
2428    }
2429
2430
2431    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2432    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2433    info->static_camera_characteristics = gStaticMetadata[cameraId];
2434
2435    return rc;
2436}
2437
2438/*===========================================================================
2439 * FUNCTION   : translateMetadata
2440 *
2441 * DESCRIPTION: translate the metadata into camera_metadata_t
2442 *
2443 * PARAMETERS : type of the request
2444 *
2445 *
2446 * RETURN     : success: camera_metadata_t*
2447 *              failure: NULL
2448 *
2449 *==========================================================================*/
2450camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2451{
2452    pthread_mutex_lock(&mMutex);
2453
2454    if (mDefaultMetadata[type] != NULL) {
2455        pthread_mutex_unlock(&mMutex);
2456        return mDefaultMetadata[type];
2457    }
2458    //first time we are handling this request
2459    //fill up the metadata structure using the wrapper class
2460    CameraMetadata settings;
2461    //translate from cam_capability_t to camera_metadata_tag_t
2462    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2463    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2464
2465    /*control*/
2466
2467    uint8_t controlIntent = 0;
2468    switch (type) {
2469      case CAMERA3_TEMPLATE_PREVIEW:
2470        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2471        break;
2472      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2473        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2474        break;
2475      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2476        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2477        break;
2478      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2479        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2480        break;
2481      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2482        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2483        break;
2484      default:
2485        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2486        break;
2487    }
2488    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2489
2490    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2491            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2492
2493    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2494    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2495
2496    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2497    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2498
2499    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2500    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2501
2502    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2503    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2504
2505    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2506    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2507
2508    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2509    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2510
2511    static uint8_t focusMode;
2512    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2513        ALOGE("%s: Setting focus mode to auto", __func__);
2514        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2515    } else {
2516        ALOGE("%s: Setting focus mode to off", __func__);
2517        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2518    }
2519    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2520
2521    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2522    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2523
2524    /*flash*/
2525    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2526    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2527
2528    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2529    settings.update(ANDROID_FLASH_FIRING_POWER,
2530            &flashFiringLevel, 1);
2531
2532    /* lens */
2533    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2534    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2535
2536    if (gCamCapability[mCameraId]->filter_densities_count) {
2537        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2538        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2539                        gCamCapability[mCameraId]->filter_densities_count);
2540    }
2541
2542    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2543    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2544
2545    mDefaultMetadata[type] = settings.release();
2546
2547    pthread_mutex_unlock(&mMutex);
2548    return mDefaultMetadata[type];
2549}
2550
2551/*===========================================================================
2552 * FUNCTION   : setFrameParameters
2553 *
2554 * DESCRIPTION: set parameters per frame as requested in the metadata from
2555 *              framework
2556 *
2557 * PARAMETERS :
2558 *   @frame_id  : frame number for this particular request
2559 *   @settings  : frame settings information from framework
2560 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2561 *
2562 * RETURN     : success: NO_ERROR
2563 *              failure:
2564 *==========================================================================*/
2565int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2566                    const camera_metadata_t *settings, uint32_t streamTypeMask)
2567{
2568    /*translate from camera_metadata_t type to parm_type_t*/
2569    int rc = 0;
2570    if (settings == NULL && mFirstRequest) {
2571        /*settings cannot be null for the first request*/
2572        return BAD_VALUE;
2573    }
2574
2575    int32_t hal_version = CAM_HAL_V3;
2576
2577    memset(mParameters, 0, sizeof(parm_buffer_t));
2578    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2579    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2580                sizeof(hal_version), &hal_version);
2581
2582    /*we need to update the frame number in the parameters*/
2583    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2584                                sizeof(frame_id), &frame_id);
2585    if (rc < 0) {
2586        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2587        return BAD_VALUE;
2588    }
2589
2590    /* Update stream id mask where buffers are requested */
2591    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2592                                sizeof(streamTypeMask), &streamTypeMask);
2593    if (rc < 0) {
2594        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2595        return BAD_VALUE;
2596    }
2597
2598    if(settings != NULL){
2599        rc = translateMetadataToParameters(settings);
2600    }
2601    /*set the parameters to backend*/
2602    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2603    return rc;
2604}
2605
2606/*===========================================================================
2607 * FUNCTION   : translateMetadataToParameters
2608 *
2609 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2610 *
2611 *
2612 * PARAMETERS :
2613 *   @settings  : frame settings information from framework
2614 *
2615 *
2616 * RETURN     : success: NO_ERROR
2617 *              failure:
2618 *==========================================================================*/
2619int QCamera3HardwareInterface::translateMetadataToParameters
2620                                  (const camera_metadata_t *settings)
2621{
2622    int rc = 0;
2623    CameraMetadata frame_settings;
2624    frame_settings = settings;
2625
2626
2627    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2628        int32_t antibandingMode =
2629            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2630        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2631                sizeof(antibandingMode), &antibandingMode);
2632    }
2633
2634    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2635        int32_t expCompensation = frame_settings.find(
2636            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2637        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2638            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2639        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2640            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2641        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2642          sizeof(expCompensation), &expCompensation);
2643    }
2644
2645    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2646        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2647        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2648                sizeof(aeLock), &aeLock);
2649    }
2650    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2651        cam_fps_range_t fps_range;
2652        fps_range.min_fps =
2653            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2654        fps_range.max_fps =
2655            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2656        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2657                sizeof(fps_range), &fps_range);
2658    }
2659
2660    float focalDistance = -1.0;
2661    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2662        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2663        rc = AddSetParmEntryToBatch(mParameters,
2664                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2665                sizeof(focalDistance), &focalDistance);
2666    }
2667
2668    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2669        uint8_t fwk_focusMode =
2670            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2671        uint8_t focusMode;
2672        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2673            focusMode = CAM_FOCUS_MODE_INFINITY;
2674        } else{
2675         focusMode = lookupHalName(FOCUS_MODES_MAP,
2676                                   sizeof(FOCUS_MODES_MAP),
2677                                   fwk_focusMode);
2678        }
2679        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2680                sizeof(focusMode), &focusMode);
2681    }
2682
2683    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2684        uint8_t awbLock =
2685            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2686        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2687                sizeof(awbLock), &awbLock);
2688    }
2689
2690    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2691        uint8_t fwk_whiteLevel =
2692            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2693        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2694                sizeof(WHITE_BALANCE_MODES_MAP),
2695                fwk_whiteLevel);
2696        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2697                sizeof(whiteLevel), &whiteLevel);
2698    }
2699
2700    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2701        uint8_t fwk_effectMode =
2702            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2703        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2704                sizeof(EFFECT_MODES_MAP),
2705                fwk_effectMode);
2706        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2707                sizeof(effectMode), &effectMode);
2708    }
2709
2710    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2711        uint8_t fwk_aeMode =
2712            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2713        uint8_t aeMode;
2714        int32_t redeye;
2715
2716        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2717            aeMode = CAM_AE_MODE_OFF;
2718        } else {
2719            aeMode = CAM_AE_MODE_ON;
2720        }
2721        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2722            redeye = 1;
2723        } else {
2724            redeye = 0;
2725        }
2726
2727        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2728                                          sizeof(AE_FLASH_MODE_MAP),
2729                                          fwk_aeMode);
2730        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2731                sizeof(aeMode), &aeMode);
2732        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2733                sizeof(flashMode), &flashMode);
2734        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2735                sizeof(redeye), &redeye);
2736    }
2737
2738    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2739        uint8_t colorCorrectMode =
2740            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2741        rc =
2742            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2743                    sizeof(colorCorrectMode), &colorCorrectMode);
2744    }
2745
2746    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
2747        cam_color_correct_gains_t colorCorrectGains;
2748        for (int i = 0; i < 4; i++) {
2749            colorCorrectGains.gains[i] =
2750                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
2751        }
2752        rc =
2753            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
2754                    sizeof(colorCorrectGains), &colorCorrectGains);
2755    }
2756
2757    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
2758        cam_color_correct_matrix_t colorCorrectTransform;
2759        cam_rational_type_t transform_elem;
2760        int num = 0;
2761        for (int i = 0; i < 3; i++) {
2762           for (int j = 0; j < 3; j++) {
2763              transform_elem.numerator =
2764                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
2765              transform_elem.denominator =
2766                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
2767              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
2768              num++;
2769           }
2770        }
2771        rc =
2772            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
2773                    sizeof(colorCorrectTransform), &colorCorrectTransform);
2774    }
2775
2776    cam_trigger_t aecTrigger;
2777    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2778    aecTrigger.trigger_id = -1;
2779    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2780        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2781        aecTrigger.trigger =
2782            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2783        aecTrigger.trigger_id =
2784            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2785    }
2786    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2787                                sizeof(aecTrigger), &aecTrigger);
2788
2789    /*af_trigger must come with a trigger id*/
2790    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2791        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2792        cam_trigger_t af_trigger;
2793        af_trigger.trigger =
2794            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2795        af_trigger.trigger_id =
2796            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2797        rc = AddSetParmEntryToBatch(mParameters,
2798                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2799    }
2800
2801    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2802        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2803        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2804                sizeof(metaMode), &metaMode);
2805        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2806           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2807           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2808                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2809                                             fwk_sceneMode);
2810           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2811                sizeof(sceneMode), &sceneMode);
2812        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
2813           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2814           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2815                sizeof(sceneMode), &sceneMode);
2816        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
2817           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2818           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2819                sizeof(sceneMode), &sceneMode);
2820        }
2821    }
2822
2823    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2824        int32_t demosaic =
2825            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2826        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2827                sizeof(demosaic), &demosaic);
2828    }
2829
2830    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2831        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2832        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
2833                sizeof(edgeMode), &edgeMode);
2834    }
2835
2836    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2837        int32_t edgeStrength =
2838            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2839        rc = AddSetParmEntryToBatch(mParameters,
2840                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2841    }
2842
2843    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2844        int32_t respectFlashMode = 1;
2845        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2846            uint8_t fwk_aeMode =
2847                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2848            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
2849                respectFlashMode = 0;
2850                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
2851                    __func__);
2852            }
2853        }
2854        if (respectFlashMode) {
2855            uint8_t flashMode =
2856                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2857            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
2858                                          sizeof(FLASH_MODES_MAP),
2859                                          flashMode);
2860            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
2861            // To check: CAM_INTF_META_FLASH_MODE usage
2862            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2863                          sizeof(flashMode), &flashMode);
2864        }
2865    }
2866
2867    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2868        uint8_t flashPower =
2869            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2870        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2871                sizeof(flashPower), &flashPower);
2872    }
2873
2874    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2875        int64_t flashFiringTime =
2876            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2877        rc = AddSetParmEntryToBatch(mParameters,
2878                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2879    }
2880
2881    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2882        uint8_t geometricMode =
2883            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2884        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2885                sizeof(geometricMode), &geometricMode);
2886    }
2887
2888    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2889        uint8_t geometricStrength =
2890            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2891        rc = AddSetParmEntryToBatch(mParameters,
2892                CAM_INTF_META_GEOMETRIC_STRENGTH,
2893                sizeof(geometricStrength), &geometricStrength);
2894    }
2895
2896    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2897        uint8_t hotPixelMode =
2898            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2899        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2900                sizeof(hotPixelMode), &hotPixelMode);
2901    }
2902
2903    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2904        float lensAperture =
2905            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2906        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2907                sizeof(lensAperture), &lensAperture);
2908    }
2909
2910    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2911        float filterDensity =
2912            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2913        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2914                sizeof(filterDensity), &filterDensity);
2915    }
2916
2917    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2918        float focalLength =
2919            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2920        rc = AddSetParmEntryToBatch(mParameters,
2921                CAM_INTF_META_LENS_FOCAL_LENGTH,
2922                sizeof(focalLength), &focalLength);
2923    }
2924
2925    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2926        uint8_t optStabMode =
2927            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2928        rc = AddSetParmEntryToBatch(mParameters,
2929                CAM_INTF_META_LENS_OPT_STAB_MODE,
2930                sizeof(optStabMode), &optStabMode);
2931    }
2932
2933    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2934        uint8_t noiseRedMode =
2935            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2936        rc = AddSetParmEntryToBatch(mParameters,
2937                CAM_INTF_META_NOISE_REDUCTION_MODE,
2938                sizeof(noiseRedMode), &noiseRedMode);
2939    }
2940
2941    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2942        uint8_t noiseRedStrength =
2943            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2944        rc = AddSetParmEntryToBatch(mParameters,
2945                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2946                sizeof(noiseRedStrength), &noiseRedStrength);
2947    }
2948
2949    cam_crop_region_t scalerCropRegion;
2950    bool scalerCropSet = false;
2951    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2952        scalerCropRegion.left =
2953            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2954        scalerCropRegion.top =
2955            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2956        scalerCropRegion.width =
2957            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2958        scalerCropRegion.height =
2959            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2960        rc = AddSetParmEntryToBatch(mParameters,
2961                CAM_INTF_META_SCALER_CROP_REGION,
2962                sizeof(scalerCropRegion), &scalerCropRegion);
2963        scalerCropSet = true;
2964    }
2965
2966    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2967        int64_t sensorExpTime =
2968            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2969        rc = AddSetParmEntryToBatch(mParameters,
2970                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2971                sizeof(sensorExpTime), &sensorExpTime);
2972    }
2973
2974    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2975        int64_t sensorFrameDuration =
2976            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2977        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
2978            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
2979        rc = AddSetParmEntryToBatch(mParameters,
2980                CAM_INTF_META_SENSOR_FRAME_DURATION,
2981                sizeof(sensorFrameDuration), &sensorFrameDuration);
2982    }
2983
2984    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2985        int32_t sensorSensitivity =
2986            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2987        if (sensorSensitivity <
2988                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
2989            sensorSensitivity =
2990                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
2991        if (sensorSensitivity >
2992                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
2993            sensorSensitivity =
2994                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
2995        rc = AddSetParmEntryToBatch(mParameters,
2996                CAM_INTF_META_SENSOR_SENSITIVITY,
2997                sizeof(sensorSensitivity), &sensorSensitivity);
2998    }
2999
3000    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3001        int32_t shadingMode =
3002            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3003        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3004                sizeof(shadingMode), &shadingMode);
3005    }
3006
3007    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3008        uint8_t shadingStrength =
3009            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3010        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3011                sizeof(shadingStrength), &shadingStrength);
3012    }
3013
3014    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3015        uint8_t fwk_facedetectMode =
3016            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3017        uint8_t facedetectMode =
3018            lookupHalName(FACEDETECT_MODES_MAP,
3019                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3020        rc = AddSetParmEntryToBatch(mParameters,
3021                CAM_INTF_META_STATS_FACEDETECT_MODE,
3022                sizeof(facedetectMode), &facedetectMode);
3023    }
3024
3025    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3026        uint8_t histogramMode =
3027            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3028        rc = AddSetParmEntryToBatch(mParameters,
3029                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3030                sizeof(histogramMode), &histogramMode);
3031    }
3032
3033    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3034        uint8_t sharpnessMapMode =
3035            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3036        rc = AddSetParmEntryToBatch(mParameters,
3037                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3038                sizeof(sharpnessMapMode), &sharpnessMapMode);
3039    }
3040
3041    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3042        uint8_t tonemapMode =
3043            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3044        rc = AddSetParmEntryToBatch(mParameters,
3045                CAM_INTF_META_TONEMAP_MODE,
3046                sizeof(tonemapMode), &tonemapMode);
3047    }
3048    int point = 0;
3049    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) {
3050        cam_tonemap_curve_t tonemapCurveBlue;
3051        tonemapCurveBlue.tonemap_points_cnt =
3052           gCamCapability[mCameraId]->max_tone_map_curve_points;
3053        for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) {
3054            for (int j = 0; j < 2; j++) {
3055               tonemapCurveBlue.tonemap_points[i][j] =
3056                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3057               point++;
3058            }
3059        }
3060        rc = AddSetParmEntryToBatch(mParameters,
3061                CAM_INTF_META_TONEMAP_CURVE_BLUE,
3062                sizeof(tonemapCurveBlue), &tonemapCurveBlue);
3063    }
3064    point = 0;
3065    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) {
3066        cam_tonemap_curve_t tonemapCurveGreen;
3067        tonemapCurveGreen.tonemap_points_cnt =
3068           gCamCapability[mCameraId]->max_tone_map_curve_points;
3069        for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) {
3070            for (int j = 0; j < 2; j++) {
3071               tonemapCurveGreen.tonemap_points[i][j] =
3072                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3073               point++;
3074            }
3075        }
3076        rc = AddSetParmEntryToBatch(mParameters,
3077                CAM_INTF_META_TONEMAP_CURVE_GREEN,
3078                sizeof(tonemapCurveGreen), &tonemapCurveGreen);
3079    }
3080    point = 0;
3081    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3082        cam_tonemap_curve_t tonemapCurveRed;
3083        tonemapCurveRed.tonemap_points_cnt =
3084           gCamCapability[mCameraId]->max_tone_map_curve_points;
3085        for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) {
3086            for (int j = 0; j < 2; j++) {
3087               tonemapCurveRed.tonemap_points[i][j] =
3088                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3089               point++;
3090            }
3091        }
3092        rc = AddSetParmEntryToBatch(mParameters,
3093                CAM_INTF_META_TONEMAP_CURVE_RED,
3094                sizeof(tonemapCurveRed), &tonemapCurveRed);
3095    }
3096
3097    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3098        uint8_t captureIntent =
3099            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3100        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3101                sizeof(captureIntent), &captureIntent);
3102    }
3103
3104    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3105        uint8_t blackLevelLock =
3106            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3107        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3108                sizeof(blackLevelLock), &blackLevelLock);
3109    }
3110
3111    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3112        uint8_t lensShadingMapMode =
3113            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3114        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3115                sizeof(lensShadingMapMode), &lensShadingMapMode);
3116    }
3117
3118    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3119        cam_area_t roi;
3120        bool reset = true;
3121        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
3122        if (scalerCropSet) {
3123            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3124        }
3125        if (reset) {
3126            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3127                    sizeof(roi), &roi);
3128        }
3129    }
3130
3131    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3132        cam_area_t roi;
3133        bool reset = true;
3134        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
3135        if (scalerCropSet) {
3136            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3137        }
3138        if (reset) {
3139            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3140                    sizeof(roi), &roi);
3141        }
3142    }
3143
3144    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3145        cam_area_t roi;
3146        bool reset = true;
3147        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
3148        if (scalerCropSet) {
3149            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3150        }
3151        if (reset) {
3152            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3153                    sizeof(roi), &roi);
3154        }
3155    }
3156    return rc;
3157}
3158
3159/*===========================================================================
3160 * FUNCTION   : getJpegSettings
3161 *
3162 * DESCRIPTION: save the jpeg settings in the HAL
3163 *
3164 *
3165 * PARAMETERS :
3166 *   @settings  : frame settings information from framework
3167 *
3168 *
3169 * RETURN     : success: NO_ERROR
3170 *              failure:
3171 *==========================================================================*/
3172int QCamera3HardwareInterface::getJpegSettings
3173                                  (const camera_metadata_t *settings)
3174{
3175    if (mJpegSettings) {
3176        if (mJpegSettings->gps_timestamp) {
3177            free(mJpegSettings->gps_timestamp);
3178            mJpegSettings->gps_timestamp = NULL;
3179        }
3180        if (mJpegSettings->gps_coordinates) {
3181            for (int i = 0; i < 3; i++) {
3182                free(mJpegSettings->gps_coordinates[i]);
3183                mJpegSettings->gps_coordinates[i] = NULL;
3184            }
3185        }
3186        free(mJpegSettings);
3187        mJpegSettings = NULL;
3188    }
3189    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3190    CameraMetadata jpeg_settings;
3191    jpeg_settings = settings;
3192
3193    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3194        mJpegSettings->jpeg_orientation =
3195            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3196    } else {
3197        mJpegSettings->jpeg_orientation = 0;
3198    }
3199    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3200        mJpegSettings->jpeg_quality =
3201            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3202    } else {
3203        mJpegSettings->jpeg_quality = 85;
3204    }
3205    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3206        mJpegSettings->thumbnail_size.width =
3207            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3208        mJpegSettings->thumbnail_size.height =
3209            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3210    } else {
3211        mJpegSettings->thumbnail_size.width = 0;
3212        mJpegSettings->thumbnail_size.height = 0;
3213    }
3214    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3215        for (int i = 0; i < 3; i++) {
3216            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3217            *(mJpegSettings->gps_coordinates[i]) =
3218                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3219        }
3220    } else{
3221       for (int i = 0; i < 3; i++) {
3222            mJpegSettings->gps_coordinates[i] = NULL;
3223        }
3224    }
3225
3226    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3227        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3228        *(mJpegSettings->gps_timestamp) =
3229            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3230    } else {
3231        mJpegSettings->gps_timestamp = NULL;
3232    }
3233
3234    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3235        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3236        for (int i = 0; i < len; i++) {
3237            mJpegSettings->gps_processing_method[i] =
3238                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3239        }
3240        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3241            mJpegSettings->gps_processing_method[len] = '\0';
3242        }
3243    } else {
3244        mJpegSettings->gps_processing_method[0] = '\0';
3245    }
3246
3247    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3248        mJpegSettings->sensor_sensitivity =
3249            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3250    } else {
3251        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3252    }
3253
3254    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3255
3256    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3257        mJpegSettings->lens_focal_length =
3258            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3259    }
3260    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3261        mJpegSettings->exposure_compensation =
3262            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3263    }
3264    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3265    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3266    mJpegSettings->is_jpeg_format = true;
3267    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3268    return 0;
3269}
3270
3271/*===========================================================================
3272 * FUNCTION   : captureResultCb
3273 *
3274 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3275 *
3276 * PARAMETERS :
3277 *   @frame  : frame information from mm-camera-interface
3278 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3279 *   @userdata: userdata
3280 *
3281 * RETURN     : NONE
3282 *==========================================================================*/
3283void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3284                camera3_stream_buffer_t *buffer,
3285                uint32_t frame_number, void *userdata)
3286{
3287    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3288    if (hw == NULL) {
3289        ALOGE("%s: Invalid hw %p", __func__, hw);
3290        return;
3291    }
3292
3293    hw->captureResultCb(metadata, buffer, frame_number);
3294    return;
3295}
3296
3297
3298/*===========================================================================
3299 * FUNCTION   : initialize
3300 *
3301 * DESCRIPTION: Pass framework callback pointers to HAL
3302 *
3303 * PARAMETERS :
3304 *
3305 *
3306 * RETURN     : Success : 0
3307 *              Failure: -ENODEV
3308 *==========================================================================*/
3309
3310int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3311                                  const camera3_callback_ops_t *callback_ops)
3312{
3313    ALOGV("%s: E", __func__);
3314    QCamera3HardwareInterface *hw =
3315        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3316    if (!hw) {
3317        ALOGE("%s: NULL camera device", __func__);
3318        return -ENODEV;
3319    }
3320
3321    int rc = hw->initialize(callback_ops);
3322    ALOGV("%s: X", __func__);
3323    return rc;
3324}
3325
3326/*===========================================================================
3327 * FUNCTION   : configure_streams
3328 *
3329 * DESCRIPTION:
3330 *
3331 * PARAMETERS :
3332 *
3333 *
3334 * RETURN     : Success: 0
3335 *              Failure: -EINVAL (if stream configuration is invalid)
3336 *                       -ENODEV (fatal error)
3337 *==========================================================================*/
3338
3339int QCamera3HardwareInterface::configure_streams(
3340        const struct camera3_device *device,
3341        camera3_stream_configuration_t *stream_list)
3342{
3343    ALOGV("%s: E", __func__);
3344    QCamera3HardwareInterface *hw =
3345        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3346    if (!hw) {
3347        ALOGE("%s: NULL camera device", __func__);
3348        return -ENODEV;
3349    }
3350    int rc = hw->configureStreams(stream_list);
3351    ALOGV("%s: X", __func__);
3352    return rc;
3353}
3354
3355/*===========================================================================
3356 * FUNCTION   : register_stream_buffers
3357 *
3358 * DESCRIPTION: Register stream buffers with the device
3359 *
3360 * PARAMETERS :
3361 *
3362 * RETURN     :
3363 *==========================================================================*/
3364int QCamera3HardwareInterface::register_stream_buffers(
3365        const struct camera3_device *device,
3366        const camera3_stream_buffer_set_t *buffer_set)
3367{
3368    ALOGV("%s: E", __func__);
3369    QCamera3HardwareInterface *hw =
3370        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3371    if (!hw) {
3372        ALOGE("%s: NULL camera device", __func__);
3373        return -ENODEV;
3374    }
3375    int rc = hw->registerStreamBuffers(buffer_set);
3376    ALOGV("%s: X", __func__);
3377    return rc;
3378}
3379
3380/*===========================================================================
3381 * FUNCTION   : construct_default_request_settings
3382 *
3383 * DESCRIPTION: Configure a settings buffer to meet the required use case
3384 *
3385 * PARAMETERS :
3386 *
3387 *
3388 * RETURN     : Success: Return valid metadata
3389 *              Failure: Return NULL
3390 *==========================================================================*/
3391const camera_metadata_t* QCamera3HardwareInterface::
3392    construct_default_request_settings(const struct camera3_device *device,
3393                                        int type)
3394{
3395
3396    ALOGV("%s: E", __func__);
3397    camera_metadata_t* fwk_metadata = NULL;
3398    QCamera3HardwareInterface *hw =
3399        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3400    if (!hw) {
3401        ALOGE("%s: NULL camera device", __func__);
3402        return NULL;
3403    }
3404
3405    fwk_metadata = hw->translateCapabilityToMetadata(type);
3406
3407    ALOGV("%s: X", __func__);
3408    return fwk_metadata;
3409}
3410
3411/*===========================================================================
3412 * FUNCTION   : process_capture_request
3413 *
3414 * DESCRIPTION:
3415 *
3416 * PARAMETERS :
3417 *
3418 *
3419 * RETURN     :
3420 *==========================================================================*/
3421int QCamera3HardwareInterface::process_capture_request(
3422                    const struct camera3_device *device,
3423                    camera3_capture_request_t *request)
3424{
3425    ALOGV("%s: E", __func__);
3426    QCamera3HardwareInterface *hw =
3427        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3428    if (!hw) {
3429        ALOGE("%s: NULL camera device", __func__);
3430        return -EINVAL;
3431    }
3432
3433    int rc = hw->processCaptureRequest(request);
3434    ALOGV("%s: X", __func__);
3435    return rc;
3436}
3437
3438/*===========================================================================
3439 * FUNCTION   : get_metadata_vendor_tag_ops
3440 *
3441 * DESCRIPTION:
3442 *
3443 * PARAMETERS :
3444 *
3445 *
3446 * RETURN     :
3447 *==========================================================================*/
3448
3449void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3450                const struct camera3_device *device,
3451                vendor_tag_query_ops_t* ops)
3452{
3453    ALOGV("%s: E", __func__);
3454    QCamera3HardwareInterface *hw =
3455        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3456    if (!hw) {
3457        ALOGE("%s: NULL camera device", __func__);
3458        return;
3459    }
3460
3461    hw->getMetadataVendorTagOps(ops);
3462    ALOGV("%s: X", __func__);
3463    return;
3464}
3465
3466/*===========================================================================
3467 * FUNCTION   : dump
3468 *
3469 * DESCRIPTION:
3470 *
3471 * PARAMETERS :
3472 *
3473 *
3474 * RETURN     :
3475 *==========================================================================*/
3476
3477void QCamera3HardwareInterface::dump(
3478                const struct camera3_device *device, int fd)
3479{
3480    ALOGV("%s: E", __func__);
3481    QCamera3HardwareInterface *hw =
3482        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3483    if (!hw) {
3484        ALOGE("%s: NULL camera device", __func__);
3485        return;
3486    }
3487
3488    hw->dump(fd);
3489    ALOGV("%s: X", __func__);
3490    return;
3491}
3492
3493/*===========================================================================
3494 * FUNCTION   : close_camera_device
3495 *
3496 * DESCRIPTION:
3497 *
3498 * PARAMETERS :
3499 *
3500 *
3501 * RETURN     :
3502 *==========================================================================*/
3503int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3504{
3505    ALOGV("%s: E", __func__);
3506    int ret = NO_ERROR;
3507    QCamera3HardwareInterface *hw =
3508        reinterpret_cast<QCamera3HardwareInterface *>(
3509            reinterpret_cast<camera3_device_t *>(device)->priv);
3510    if (!hw) {
3511        ALOGE("NULL camera device");
3512        return BAD_VALUE;
3513    }
3514    delete hw;
3515
3516    pthread_mutex_lock(&mCameraSessionLock);
3517    mCameraSessionActive = 0;
3518    pthread_mutex_unlock(&mCameraSessionLock);
3519    ALOGV("%s: X", __func__);
3520    return ret;
3521}
3522
3523/*===========================================================================
3524 * FUNCTION   : getWaveletDenoiseProcessPlate
3525 *
3526 * DESCRIPTION: query wavelet denoise process plate
3527 *
3528 * PARAMETERS : None
3529 *
3530 * RETURN     : WNR prcocess plate vlaue
3531 *==========================================================================*/
3532cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3533{
3534    char prop[PROPERTY_VALUE_MAX];
3535    memset(prop, 0, sizeof(prop));
3536    property_get("persist.denoise.process.plates", prop, "0");
3537    int processPlate = atoi(prop);
3538    switch(processPlate) {
3539    case 0:
3540        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3541    case 1:
3542        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3543    case 2:
3544        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3545    case 3:
3546        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3547    default:
3548        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3549    }
3550}
3551
3552/*===========================================================================
3553 * FUNCTION   : needRotationReprocess
3554 *
3555 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3556 *
3557 * PARAMETERS : none
3558 *
3559 * RETURN     : true: needed
3560 *              false: no need
3561 *==========================================================================*/
3562bool QCamera3HardwareInterface::needRotationReprocess()
3563{
3564
3565    if (!mJpegSettings->is_jpeg_format) {
3566        // RAW image, no need to reprocess
3567        return false;
3568    }
3569
3570    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3571        mJpegSettings->jpeg_orientation > 0) {
3572        // current rotation is not zero, and pp has the capability to process rotation
3573        ALOGD("%s: need do reprocess for rotation", __func__);
3574        return true;
3575    }
3576
3577    return false;
3578}
3579
3580/*===========================================================================
3581 * FUNCTION   : needReprocess
3582 *
3583 * DESCRIPTION: if reprocess in needed
3584 *
3585 * PARAMETERS : none
3586 *
3587 * RETURN     : true: needed
3588 *              false: no need
3589 *==========================================================================*/
3590bool QCamera3HardwareInterface::needReprocess()
3591{
3592    if (!mJpegSettings->is_jpeg_format) {
3593        // RAW image, no need to reprocess
3594        return false;
3595    }
3596
3597    if ((mJpegSettings->min_required_pp_mask > 0) ||
3598         isWNREnabled()) {
3599        // TODO: add for ZSL HDR later
3600        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3601        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3602        return true;
3603    }
3604    return needRotationReprocess();
3605}
3606
3607/*===========================================================================
3608 * FUNCTION   : addOnlineReprocChannel
3609 *
3610 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
3611 *              coming from input channel
3612 *
3613 * PARAMETERS :
3614 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3615 *
3616 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3617 *==========================================================================*/
3618QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3619                                                      QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3620{
3621    int32_t rc = NO_ERROR;
3622    QCamera3ReprocessChannel *pChannel = NULL;
3623    if (pInputChannel == NULL) {
3624        ALOGE("%s: input channel obj is NULL", __func__);
3625        return NULL;
3626    }
3627
3628    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3629            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
3630    if (NULL == pChannel) {
3631        ALOGE("%s: no mem for reprocess channel", __func__);
3632        return NULL;
3633    }
3634
3635    // Capture channel, only need snapshot and postview streams start together
3636    mm_camera_channel_attr_t attr;
3637    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
3638    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
3639    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
3640    rc = pChannel->initialize();
3641    if (rc != NO_ERROR) {
3642        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
3643        delete pChannel;
3644        return NULL;
3645    }
3646
3647    // pp feature config
3648    cam_pp_feature_config_t pp_config;
3649    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
3650    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
3651        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
3652        pp_config.sharpness = 10;
3653    }
3654
3655    if (isWNREnabled()) {
3656        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
3657        pp_config.denoise2d.denoise_enable = 1;
3658        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
3659    }
3660    if (needRotationReprocess()) {
3661        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
3662        int rotation = mJpegSettings->jpeg_orientation;
3663        if (rotation == 0) {
3664            pp_config.rotation = ROTATE_0;
3665        } else if (rotation == 90) {
3666            pp_config.rotation = ROTATE_90;
3667        } else if (rotation == 180) {
3668            pp_config.rotation = ROTATE_180;
3669        } else if (rotation == 270) {
3670            pp_config.rotation = ROTATE_270;
3671        }
3672    }
3673
3674   rc = pChannel->addReprocStreamsFromSource(pp_config,
3675                                             pInputChannel,
3676                                             mMetadataChannel);
3677
3678    if (rc != NO_ERROR) {
3679        delete pChannel;
3680        return NULL;
3681    }
3682    return pChannel;
3683}
3684
// Upper bound on unmatched frames kept queued per channel, taken from the
// number of post-processing buffers the sensor capability reports.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
3689
// Whether wavelet noise reduction is available, per the sensor capability.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
3693
3694}; //end namespace qcamera
3695