QCamera3HWI.cpp revision 867ab9fde22765039b15b11d630fc99933abd244
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
#define MAX(a, b) ((a) > (b) ? (a) : (b))

/* Shorthand: CPU-mapped address of buffer INDEX inside a memory object. */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
/* Per-sensor capability tables; the constructor dereferences
 * gCamCapability[cameraId], so these are assumed populated by the module
 * layer before a HAL instance is created -- TODO confirm. */
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
/* Cached static camera metadata, one entry per sensor. */
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

/* Serializes session open/close across all instances; openCamera() rejects
 * a second simultaneous session via mCameraSessionActive. */
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
/* Framework ANDROID_CONTROL_EFFECT_MODE_* -> backend CAM_EFFECT_MODE_* map. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
72
/* Framework ANDROID_CONTROL_AWB_MODE_* -> backend CAM_WB_MODE_* map. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
84
/* Framework ANDROID_CONTROL_SCENE_MODE_* -> backend CAM_SCENE_MODE_* map.
 * Note STEADYPHOTO intentionally maps to the backend's ANTISHAKE mode. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
102
/* Framework ANDROID_CONTROL_AF_MODE_* -> backend CAM_FOCUS_MODE_* map.
 * AF_MODE_OFF maps to FIXED focus (no backend "off" equivalent). */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
111
/* Framework AE antibanding mode -> backend CAM_ANTIBANDING_MODE_* map. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
118
/* Framework AE mode -> backend flash mode.  Both OFF and ON (no flash)
 * disable flash; AUTO_FLASH_REDEYE reuses plain AUTO flash in the backend. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
126
/* Framework ANDROID_FLASH_MODE_* -> backend CAM_FLASH_MODE_* map. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
132
/* Framework face-detect mode -> backend mode.  Only OFF and FULL are
 * mapped; SIMPLE mode is not present in this table. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
137
/* Supported JPEG thumbnail sizes as flattened (width, height) pairs; the
 * trailing (0, 0) entry advertises "no thumbnail". */
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
/* Static camera3_device_ops vtable handed to the framework via
 * mCameraDevice.ops; each entry forwards to a static trampoline that
 * recovers the instance from camera3_device_t::priv.  Uses the GNU
 * "label:" designated-initializer extension. */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to their idle state and fills in the camera3_device_t
 *              exposed to the framework. Does NOT open the camera session.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Device struct handed back through openCamera(); priv lets the static
    // ops in mCameraOps recover this instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): assumes gCamCapability[cameraId] is already populated by
    // the module layer before construction -- confirm with the caller.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;  // -1 means no request id reported yet
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are created on demand; start them all NULL.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power module is optional; log and continue without hints on failure.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if (channel)
240            delete channel;
241        free (*it);
242    }
243
244    mPictureChannel = NULL;
245
246    if (mJpegSettings != NULL) {
247        free(mJpegSettings);
248        mJpegSettings = NULL;
249    }
250
251    /* Clean up all channels */
252    if (mCameraInitialized) {
253        if (mMetadataChannel) {
254            mMetadataChannel->stop();
255            delete mMetadataChannel;
256            mMetadataChannel = NULL;
257        }
258        deinitParameters();
259    }
260
261    if (mCameraOpened)
262        closeCamera();
263
264    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
265        if (mDefaultMetadata[i])
266            free_camera_metadata(mDefaultMetadata[i]);
267
268    pthread_cond_destroy(&mRequestCond);
269
270    pthread_mutex_destroy(&mMutex);
271    ALOGV("%s: X", __func__);
272}
273
274/*===========================================================================
275 * FUNCTION   : openCamera
276 *
277 * DESCRIPTION: open camera
278 *
279 * PARAMETERS :
280 *   @hw_device  : double ptr for camera device struct
281 *
282 * RETURN     : int32_t type of status
283 *              NO_ERROR  -- success
284 *              none-zero failure code
285 *==========================================================================*/
286int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
287{
288    int rc = 0;
289    pthread_mutex_lock(&mCameraSessionLock);
290    if (mCameraSessionActive) {
291        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
292        pthread_mutex_unlock(&mCameraSessionLock);
293        return INVALID_OPERATION;
294    }
295
296    if (mCameraOpened) {
297        *hw_device = NULL;
298        return PERMISSION_DENIED;
299    }
300
301    rc = openCamera();
302    if (rc == 0) {
303        *hw_device = &mCameraDevice.common;
304        mCameraSessionActive = 1;
305    } else
306        *hw_device = NULL;
307
308#ifdef HAS_MULTIMEDIA_HINTS
309    if (rc == 0) {
310        if (m_pPowerModule) {
311            if (m_pPowerModule->powerHint) {
312                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
313                        (void *)"state=1");
314            }
315        }
316    }
317#endif
318    pthread_mutex_unlock(&mCameraSessionLock);
319    return rc;
320}
321
322/*===========================================================================
323 * FUNCTION   : openCamera
324 *
325 * DESCRIPTION: open camera
326 *
327 * PARAMETERS : none
328 *
329 * RETURN     : int32_t type of status
330 *              NO_ERROR  -- success
331 *              none-zero failure code
332 *==========================================================================*/
333int QCamera3HardwareInterface::openCamera()
334{
335    if (mCameraHandle) {
336        ALOGE("Failure: Camera already opened");
337        return ALREADY_EXISTS;
338    }
339    mCameraHandle = camera_open(mCameraId);
340    if (!mCameraHandle) {
341        ALOGE("camera_open failed.");
342        return UNKNOWN_ERROR;
343    }
344
345    mCameraOpened = true;
346
347    return NO_ERROR;
348}
349
350/*===========================================================================
351 * FUNCTION   : closeCamera
352 *
353 * DESCRIPTION: close camera
354 *
355 * PARAMETERS : none
356 *
357 * RETURN     : int32_t type of status
358 *              NO_ERROR  -- success
359 *              none-zero failure code
360 *==========================================================================*/
361int QCamera3HardwareInterface::closeCamera()
362{
363    int rc = NO_ERROR;
364
365    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
366    mCameraHandle = NULL;
367    mCameraOpened = false;
368
369#ifdef HAS_MULTIMEDIA_HINTS
370    if (rc == NO_ERROR) {
371        if (m_pPowerModule) {
372            if (m_pPowerModule->powerHint) {
373                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
374                        (void *)"state=0");
375            }
376        }
377    }
378#endif
379
380    return rc;
381}
382
383/*===========================================================================
384 * FUNCTION   : initialize
385 *
386 * DESCRIPTION: Initialize frameworks callback functions
387 *
388 * PARAMETERS :
389 *   @callback_ops : callback function to frameworks
390 *
391 * RETURN     :
392 *
393 *==========================================================================*/
394int QCamera3HardwareInterface::initialize(
395        const struct camera3_callback_ops *callback_ops)
396{
397    int rc;
398
399    pthread_mutex_lock(&mMutex);
400
401    rc = initParameters();
402    if (rc < 0) {
403        ALOGE("%s: initParamters failed %d", __func__, rc);
404       goto err1;
405    }
406    mCallbackOps = callback_ops;
407
408    pthread_mutex_unlock(&mMutex);
409    mCameraInitialized = true;
410    return 0;
411
412err1:
413    pthread_mutex_unlock(&mMutex);
414    return rc;
415}
416
417/*===========================================================================
418 * FUNCTION   : configureStreams
419 *
420 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
421 *              and output streams.
422 *
423 * PARAMETERS :
424 *   @stream_list : streams to be configured
425 *
426 * RETURN     :
427 *
428 *==========================================================================*/
429int QCamera3HardwareInterface::configureStreams(
430        camera3_stream_configuration_t *streamList)
431{
432    int rc = 0;
433    mIsZslMode = false;
434
435    // Sanity check stream_list
436    if (streamList == NULL) {
437        ALOGE("%s: NULL stream configuration", __func__);
438        return BAD_VALUE;
439    }
440    if (streamList->streams == NULL) {
441        ALOGE("%s: NULL stream list", __func__);
442        return BAD_VALUE;
443    }
444
445    if (streamList->num_streams < 1) {
446        ALOGE("%s: Bad number of streams requested: %d", __func__,
447                streamList->num_streams);
448        return BAD_VALUE;
449    }
450
451    /* first invalidate all the steams in the mStreamList
452     * if they appear again, they will be validated */
453    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
454            it != mStreamInfo.end(); it++) {
455        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
456        channel->stop();
457        (*it)->status = INVALID;
458    }
459    if (mMetadataChannel) {
460        /* If content of mStreamInfo is not 0, there is metadata stream */
461        mMetadataChannel->stop();
462    }
463
464    pthread_mutex_lock(&mMutex);
465
466    camera3_stream_t *inputStream = NULL;
467    camera3_stream_t *jpegStream = NULL;
468    cam_stream_size_info_t stream_config_info;
469
470    for (size_t i = 0; i < streamList->num_streams; i++) {
471        camera3_stream_t *newStream = streamList->streams[i];
472        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
473                __func__, newStream->stream_type, newStream->format,
474                 newStream->width, newStream->height);
475        //if the stream is in the mStreamList validate it
476        bool stream_exists = false;
477        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
478                it != mStreamInfo.end(); it++) {
479            if ((*it)->stream == newStream) {
480                QCamera3Channel *channel =
481                    (QCamera3Channel*)(*it)->stream->priv;
482                stream_exists = true;
483                (*it)->status = RECONFIGURE;
484                /*delete the channel object associated with the stream because
485                  we need to reconfigure*/
486                delete channel;
487                (*it)->stream->priv = NULL;
488            }
489        }
490        if (!stream_exists) {
491            //new stream
492            stream_info_t* stream_info;
493            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
494            stream_info->stream = newStream;
495            stream_info->status = VALID;
496            stream_info->registered = 0;
497            mStreamInfo.push_back(stream_info);
498        }
499        if (newStream->stream_type == CAMERA3_STREAM_INPUT
500                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
501            if (inputStream != NULL) {
502                ALOGE("%s: Multiple input streams requested!", __func__);
503                pthread_mutex_unlock(&mMutex);
504                return BAD_VALUE;
505            }
506            inputStream = newStream;
507        }
508        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
509            jpegStream = newStream;
510        }
511    }
512    mInputStream = inputStream;
513
514    /*clean up invalid streams*/
515    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
516            it != mStreamInfo.end();) {
517        if(((*it)->status) == INVALID){
518            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
519            delete channel;
520            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
521            free(*it);
522            it = mStreamInfo.erase(it);
523        } else {
524            it++;
525        }
526    }
527    if (mMetadataChannel) {
528        delete mMetadataChannel;
529        mMetadataChannel = NULL;
530    }
531
532    //Create metadata channel and initialize it
533    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
534                    mCameraHandle->ops, captureResultCb,
535                    &gCamCapability[mCameraId]->padding_info, this);
536    if (mMetadataChannel == NULL) {
537        ALOGE("%s: failed to allocate metadata channel", __func__);
538        rc = -ENOMEM;
539        pthread_mutex_unlock(&mMutex);
540        return rc;
541    }
542    rc = mMetadataChannel->initialize();
543    if (rc < 0) {
544        ALOGE("%s: metadata channel initialization failed", __func__);
545        delete mMetadataChannel;
546        pthread_mutex_unlock(&mMutex);
547        return rc;
548    }
549
550    /* Allocate channel objects for the requested streams */
551    for (size_t i = 0; i < streamList->num_streams; i++) {
552        camera3_stream_t *newStream = streamList->streams[i];
553        uint32_t stream_usage = newStream->usage;
554        stream_config_info.stream_sizes[i].width = newStream->width;
555        stream_config_info.stream_sizes[i].height = newStream->height;
556        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
557            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
558            //for zsl stream the size is jpeg size
559            stream_config_info.stream_sizes[i].width = jpegStream->width;
560            stream_config_info.stream_sizes[i].height = jpegStream->height;
561            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
562        } else {
563           //for non zsl streams find out the format
564           switch (newStream->format) {
565           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
566              {
567                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
568                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
569                 } else {
570                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
571                 }
572              }
573              break;
574           case HAL_PIXEL_FORMAT_YCbCr_420_888:
575              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
576              break;
577           case HAL_PIXEL_FORMAT_BLOB:
578              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
579              break;
580           default:
581              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
582              break;
583           }
584        }
585        if (newStream->priv == NULL) {
586            //New stream, construct channel
587            switch (newStream->stream_type) {
588            case CAMERA3_STREAM_INPUT:
589                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
590                break;
591            case CAMERA3_STREAM_BIDIRECTIONAL:
592                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
593                    GRALLOC_USAGE_HW_CAMERA_WRITE;
594                break;
595            case CAMERA3_STREAM_OUTPUT:
596                /* For video encoding stream, set read/write rarely
597                 * flag so that they may be set to un-cached */
598                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
599                    newStream->usage =
600                         (GRALLOC_USAGE_SW_READ_RARELY |
601                         GRALLOC_USAGE_SW_WRITE_RARELY |
602                         GRALLOC_USAGE_HW_CAMERA_WRITE);
603                else
604                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
605                break;
606            default:
607                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
608                break;
609            }
610
611            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
612                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
613                QCamera3Channel *channel;
614                switch (newStream->format) {
615                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
616                case HAL_PIXEL_FORMAT_YCbCr_420_888:
617                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
618                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
619                        jpegStream) {
620                        uint32_t width = jpegStream->width;
621                        uint32_t height = jpegStream->height;
622                        mIsZslMode = true;
623                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
624                            mCameraHandle->ops, captureResultCb,
625                            &gCamCapability[mCameraId]->padding_info, this, newStream,
626                            width, height);
627                    } else
628                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
629                            mCameraHandle->ops, captureResultCb,
630                            &gCamCapability[mCameraId]->padding_info, this, newStream);
631                    if (channel == NULL) {
632                        ALOGE("%s: allocation of channel failed", __func__);
633                        pthread_mutex_unlock(&mMutex);
634                        return -ENOMEM;
635                    }
636
637                    newStream->priv = channel;
638                    break;
639                case HAL_PIXEL_FORMAT_BLOB:
640                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
641                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
642                            mCameraHandle->ops, captureResultCb,
643                            &gCamCapability[mCameraId]->padding_info, this, newStream);
644                    if (mPictureChannel == NULL) {
645                        ALOGE("%s: allocation of channel failed", __func__);
646                        pthread_mutex_unlock(&mMutex);
647                        return -ENOMEM;
648                    }
649                    newStream->priv = (QCamera3Channel*)mPictureChannel;
650                    break;
651
652                //TODO: Add support for app consumed format?
653                default:
654                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
655                    break;
656                }
657            }
658        } else {
659            // Channel already exists for this stream
660            // Do nothing for now
661        }
662    }
663    /*For the streams to be reconfigured we need to register the buffers
664      since the framework wont*/
665    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
666            it != mStreamInfo.end(); it++) {
667        if ((*it)->status == RECONFIGURE) {
668            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
669            /*only register buffers for streams that have already been
670              registered*/
671            if ((*it)->registered) {
672                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
673                        (*it)->buffer_set.buffers);
674                if (rc != NO_ERROR) {
675                    ALOGE("%s: Failed to register the buffers of old stream,\
676                            rc = %d", __func__, rc);
677                }
678                ALOGV("%s: channel %p has %d buffers",
679                        __func__, channel, (*it)->buffer_set.num_buffers);
680            }
681        }
682
683        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
684        if (index == NAME_NOT_FOUND) {
685            mPendingBuffersMap.add((*it)->stream, 0);
686        } else {
687            mPendingBuffersMap.editValueAt(index) = 0;
688        }
689    }
690
691    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
692    mPendingRequestsList.clear();
693
694    /*flush the metadata list*/
695    if (!mStoredMetadataList.empty()) {
696        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
697              m != mStoredMetadataList.end(); m++) {
698            mMetadataChannel->bufDone(m->meta_buf);
699            free(m->meta_buf);
700            m = mStoredMetadataList.erase(m);
701        }
702    }
703    int32_t hal_version = CAM_HAL_V3;
704    stream_config_info.num_streams = streamList->num_streams;
705
706    //settings/parameters don't carry over for new configureStreams
707    memset(mParameters, 0, sizeof(parm_buffer_t));
708
709    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
710    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
711                sizeof(hal_version), &hal_version);
712
713    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
714                sizeof(stream_config_info), &stream_config_info);
715
716    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
717
718    mFirstRequest = true;
719
720    //Get min frame duration for this streams configuration
721    deriveMinFrameDuration();
722
723    pthread_mutex_unlock(&mMutex);
724    return rc;
725}
726
727/*===========================================================================
728 * FUNCTION   : validateCaptureRequest
729 *
730 * DESCRIPTION: validate a capture request from camera service
731 *
732 * PARAMETERS :
733 *   @request : request from framework to process
734 *
735 * RETURN     :
736 *
737 *==========================================================================*/
738int QCamera3HardwareInterface::validateCaptureRequest(
739                    camera3_capture_request_t *request)
740{
741    ssize_t idx = 0;
742    const camera3_stream_buffer_t *b;
743    CameraMetadata meta;
744
745    /* Sanity check the request */
746    if (request == NULL) {
747        ALOGE("%s: NULL capture request", __func__);
748        return BAD_VALUE;
749    }
750
751    uint32_t frameNumber = request->frame_number;
752    if (request->input_buffer != NULL &&
753            request->input_buffer->stream != mInputStream) {
754        ALOGE("%s: Request %d: Input buffer not from input stream!",
755                __FUNCTION__, frameNumber);
756        return BAD_VALUE;
757    }
758    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
759        ALOGE("%s: Request %d: No output buffers provided!",
760                __FUNCTION__, frameNumber);
761        return BAD_VALUE;
762    }
763    if (request->input_buffer != NULL) {
764        b = request->input_buffer;
765        QCamera3Channel *channel =
766            static_cast<QCamera3Channel*>(b->stream->priv);
767        if (channel == NULL) {
768            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
769                    __func__, frameNumber, idx);
770            return BAD_VALUE;
771        }
772        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
773            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
774                    __func__, frameNumber, idx);
775            return BAD_VALUE;
776        }
777        if (b->release_fence != -1) {
778            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
779                    __func__, frameNumber, idx);
780            return BAD_VALUE;
781        }
782        if (b->buffer == NULL) {
783            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
784                    __func__, frameNumber, idx);
785            return BAD_VALUE;
786        }
787    }
788
789    // Validate all buffers
790    b = request->output_buffers;
791    do {
792        QCamera3Channel *channel =
793                static_cast<QCamera3Channel*>(b->stream->priv);
794        if (channel == NULL) {
795            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
796                    __func__, frameNumber, idx);
797            return BAD_VALUE;
798        }
799        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
800            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
801                    __func__, frameNumber, idx);
802            return BAD_VALUE;
803        }
804        if (b->release_fence != -1) {
805            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
806                    __func__, frameNumber, idx);
807            return BAD_VALUE;
808        }
809        if (b->buffer == NULL) {
810            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
811                    __func__, frameNumber, idx);
812            return BAD_VALUE;
813        }
814        idx++;
815        b = request->output_buffers + idx;
816    } while (idx < (ssize_t)request->num_output_buffers);
817
818    return NO_ERROR;
819}
820
821/*===========================================================================
822 * FUNCTION   : deriveMinFrameDuration
823 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
825 *              on currently configured streams.
826 *
827 * PARAMETERS : NONE
828 *
829 * RETURN     : NONE
830 *
831 *==========================================================================*/
832void QCamera3HardwareInterface::deriveMinFrameDuration()
833{
834    int32_t maxJpegDimension, maxProcessedDimension;
835
836    maxJpegDimension = 0;
837    maxProcessedDimension = 0;
838
839    // Figure out maximum jpeg, processed, and raw dimensions
840    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
841        it != mStreamInfo.end(); it++) {
842
843        // Input stream doesn't have valid stream_type
844        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
845            continue;
846
847        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
848        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
849            if (dimension > maxJpegDimension)
850                maxJpegDimension = dimension;
851        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
852            if (dimension > maxProcessedDimension)
853                maxProcessedDimension = dimension;
854        }
855    }
856
857    //Assume all jpeg dimensions are in processed dimensions.
858    if (maxJpegDimension > maxProcessedDimension)
859        maxProcessedDimension = maxJpegDimension;
860
861    //Find minimum durations for processed, jpeg, and raw
862    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
863    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
864        if (maxProcessedDimension ==
865            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
866            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
867            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
868            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
869            break;
870        }
871    }
872}
873
874/*===========================================================================
875 * FUNCTION   : getMinFrameDuration
876 *
877 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
878 *              and current request configuration.
879 *
880 * PARAMETERS : @request: requset sent by the frameworks
881 *
882 * RETURN     : min farme duration for a particular request
883 *
884 *==========================================================================*/
885int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
886{
887    bool hasJpegStream = false;
888    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
889        const camera3_stream_t *stream = request->output_buffers[i].stream;
890        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
891            hasJpegStream = true;
892    }
893
894    if (!hasJpegStream)
895        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
896    else
897        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
898}
899
900/*===========================================================================
901 * FUNCTION   : registerStreamBuffers
902 *
903 * DESCRIPTION: Register buffers for a given stream with the HAL device.
904 *
905 * PARAMETERS :
906 *   @stream_list : streams to be configured
907 *
908 * RETURN     :
909 *
910 *==========================================================================*/
911int QCamera3HardwareInterface::registerStreamBuffers(
912        const camera3_stream_buffer_set_t *buffer_set)
913{
914    int rc = 0;
915
916    pthread_mutex_lock(&mMutex);
917
918    if (buffer_set == NULL) {
919        ALOGE("%s: Invalid buffer_set parameter.", __func__);
920        pthread_mutex_unlock(&mMutex);
921        return -EINVAL;
922    }
923    if (buffer_set->stream == NULL) {
924        ALOGE("%s: Invalid stream parameter.", __func__);
925        pthread_mutex_unlock(&mMutex);
926        return -EINVAL;
927    }
928    if (buffer_set->num_buffers < 1) {
929        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
930        pthread_mutex_unlock(&mMutex);
931        return -EINVAL;
932    }
933    if (buffer_set->buffers == NULL) {
934        ALOGE("%s: Invalid buffers parameter.", __func__);
935        pthread_mutex_unlock(&mMutex);
936        return -EINVAL;
937    }
938
939    camera3_stream_t *stream = buffer_set->stream;
940    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
941
942    //set the buffer_set in the mStreamInfo array
943    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
944            it != mStreamInfo.end(); it++) {
945        if ((*it)->stream == stream) {
946            uint32_t numBuffers = buffer_set->num_buffers;
947            (*it)->buffer_set.stream = buffer_set->stream;
948            (*it)->buffer_set.num_buffers = numBuffers;
949            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
950            if ((*it)->buffer_set.buffers == NULL) {
951                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
952                pthread_mutex_unlock(&mMutex);
953                return -ENOMEM;
954            }
955            for (size_t j = 0; j < numBuffers; j++){
956                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
957            }
958            (*it)->registered = 1;
959        }
960    }
961    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
962    if (rc < 0) {
963        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
964        pthread_mutex_unlock(&mMutex);
965        return -ENODEV;
966    }
967
968    pthread_mutex_unlock(&mMutex);
969    return NO_ERROR;
970}
971
972/*===========================================================================
973 * FUNCTION   : processCaptureRequest
974 *
975 * DESCRIPTION: process a capture request from camera service
976 *
977 * PARAMETERS :
978 *   @request : request from framework to process
979 *
980 * RETURN     :
981 *
982 *==========================================================================*/
983int QCamera3HardwareInterface::processCaptureRequest(
984                    camera3_capture_request_t *request)
985{
986    int rc = NO_ERROR;
987    int32_t request_id;
988    CameraMetadata meta;
989    MetadataBufferInfo reproc_meta;
990    int queueMetadata = 0;
991
992    pthread_mutex_lock(&mMutex);
993
994    rc = validateCaptureRequest(request);
995    if (rc != NO_ERROR) {
996        ALOGE("%s: incoming request is not valid", __func__);
997        pthread_mutex_unlock(&mMutex);
998        return rc;
999    }
1000
1001    meta = request->settings;
1002
1003    // For first capture request, send capture intent, and
1004    // stream on all streams
1005    if (mFirstRequest) {
1006
1007        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1008            int32_t hal_version = CAM_HAL_V3;
1009            uint8_t captureIntent =
1010                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1011
1012            memset(mParameters, 0, sizeof(parm_buffer_t));
1013            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1014            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1015                sizeof(hal_version), &hal_version);
1016            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1017                sizeof(captureIntent), &captureIntent);
1018            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1019                mParameters);
1020        }
1021
1022        mMetadataChannel->start();
1023        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1024            it != mStreamInfo.end(); it++) {
1025            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1026            channel->start();
1027        }
1028    }
1029
1030    uint32_t frameNumber = request->frame_number;
1031    uint32_t streamTypeMask = 0;
1032
1033    if (meta.exists(ANDROID_REQUEST_ID)) {
1034        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1035        mCurrentRequestId = request_id;
1036        ALOGV("%s: Received request with id: %d",__func__, request_id);
1037    } else if (mFirstRequest || mCurrentRequestId == -1){
1038        ALOGE("%s: Unable to find request id field, \
1039                & no previous id available", __func__);
1040        return NAME_NOT_FOUND;
1041    } else {
1042        ALOGV("%s: Re-using old request id", __func__);
1043        request_id = mCurrentRequestId;
1044    }
1045
1046    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1047                                    __func__, __LINE__,
1048                                    request->num_output_buffers,
1049                                    request->input_buffer,
1050                                    frameNumber);
1051    // Acquire all request buffers first
1052    int blob_request = 0;
1053    for (size_t i = 0; i < request->num_output_buffers; i++) {
1054        const camera3_stream_buffer_t& output = request->output_buffers[i];
1055        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1056        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1057
1058        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1059        //Call function to store local copy of jpeg data for encode params.
1060            blob_request = 1;
1061            rc = getJpegSettings(request->settings);
1062            if (rc < 0) {
1063                ALOGE("%s: failed to get jpeg parameters", __func__);
1064                pthread_mutex_unlock(&mMutex);
1065                return rc;
1066            }
1067        }
1068
1069        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1070        if (rc != OK) {
1071            ALOGE("%s: fence wait failed %d", __func__, rc);
1072            pthread_mutex_unlock(&mMutex);
1073            return rc;
1074        }
1075        streamTypeMask |= channel->getStreamTypeMask();
1076    }
1077
1078    rc = setFrameParameters(request, streamTypeMask);
1079    if (rc < 0) {
1080        ALOGE("%s: fail to set frame parameters", __func__);
1081        pthread_mutex_unlock(&mMutex);
1082        return rc;
1083    }
1084
1085    /* Update pending request list and pending buffers map */
1086    PendingRequestInfo pendingRequest;
1087    pendingRequest.frame_number = frameNumber;
1088    pendingRequest.num_buffers = request->num_output_buffers;
1089    pendingRequest.request_id = request_id;
1090    pendingRequest.blob_request = blob_request;
1091    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1092
1093    for (size_t i = 0; i < request->num_output_buffers; i++) {
1094        RequestedBufferInfo requestedBuf;
1095        requestedBuf.stream = request->output_buffers[i].stream;
1096        requestedBuf.buffer = NULL;
1097        pendingRequest.buffers.push_back(requestedBuf);
1098
1099        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1100    }
1101    mPendingRequestsList.push_back(pendingRequest);
1102
1103    // Notify metadata channel we receive a request
1104    mMetadataChannel->request(NULL, frameNumber);
1105
1106    // Call request on other streams
1107    for (size_t i = 0; i < request->num_output_buffers; i++) {
1108        const camera3_stream_buffer_t& output = request->output_buffers[i];
1109        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1110        mm_camera_buf_def_t *pInputBuffer = NULL;
1111
1112        if (channel == NULL) {
1113            ALOGE("%s: invalid channel pointer for stream", __func__);
1114            continue;
1115        }
1116
1117        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1118            QCamera3RegularChannel* inputChannel = NULL;
1119            if(request->input_buffer != NULL){
1120                //Try to get the internal format
1121                inputChannel = (QCamera3RegularChannel*)
1122                    request->input_buffer->stream->priv;
1123                if(inputChannel == NULL ){
1124                    ALOGE("%s: failed to get input channel handle", __func__);
1125                } else {
1126                    pInputBuffer =
1127                        inputChannel->getInternalFormatBuffer(
1128                                request->input_buffer->buffer);
1129                    ALOGD("%s: Input buffer dump",__func__);
1130                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1131                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1132                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1133                    ALOGD("Handle:%p", request->input_buffer->buffer);
1134                    //TODO: need to get corresponding metadata and send it to pproc
1135                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1136                         m != mStoredMetadataList.end(); m++) {
1137                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1138                            reproc_meta.meta_buf = m->meta_buf;
1139                            queueMetadata = 1;
1140                            break;
1141                        }
1142                    }
1143                }
1144            }
1145            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1146                            pInputBuffer,(QCamera3Channel*)inputChannel);
1147            if (queueMetadata) {
1148                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1149            }
1150        } else {
1151            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1152                __LINE__, output.buffer, frameNumber);
1153            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1154                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1155                     m != mStoredMetadataList.end(); m++) {
1156                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1157                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1158                            mMetadataChannel->bufDone(m->meta_buf);
1159                            free(m->meta_buf);
1160                            m = mStoredMetadataList.erase(m);
1161                            break;
1162                        }
1163                   }
1164                }
1165            }
1166            rc = channel->request(output.buffer, frameNumber);
1167        }
1168        if (rc < 0)
1169            ALOGE("%s: request failed", __func__);
1170    }
1171
1172    mFirstRequest = false;
1173    // Added a timed condition wait
1174    struct timespec ts;
1175    uint8_t isValidTimeout = 1;
1176    rc = clock_gettime(CLOCK_REALTIME, &ts);
1177    if (rc < 0) {
1178        isValidTimeout = 0;
1179        ALOGE("%s: Error reading the real time clock!!", __func__);
1180    }
1181    else {
1182        // Make timeout as 5 sec for request to be honored
1183        ts.tv_sec += 5;
1184    }
1185    //Block on conditional variable
1186    mPendingRequest = 1;
1187    while (mPendingRequest == 1) {
1188        if (!isValidTimeout) {
1189            ALOGV("%s: Blocking on conditional wait", __func__);
1190            pthread_cond_wait(&mRequestCond, &mMutex);
1191        }
1192        else {
1193            ALOGV("%s: Blocking on timed conditional wait", __func__);
1194            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1195            if (rc == ETIMEDOUT) {
1196                rc = -ENODEV;
1197                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1198                break;
1199            }
1200        }
1201        ALOGV("%s: Unblocked", __func__);
1202    }
1203
1204    pthread_mutex_unlock(&mMutex);
1205    return rc;
1206}
1207
1208/*===========================================================================
1209 * FUNCTION   : getMetadataVendorTagOps
1210 *
1211 * DESCRIPTION:
1212 *
1213 * PARAMETERS :
1214 *
1215 *
1216 * RETURN     :
1217 *==========================================================================*/
1218void QCamera3HardwareInterface::getMetadataVendorTagOps(
1219                    vendor_tag_query_ops_t* /*ops*/)
1220{
1221    /* Enable locks when we eventually add Vendor Tags */
1222    /*
1223    pthread_mutex_lock(&mMutex);
1224
1225    pthread_mutex_unlock(&mMutex);
1226    */
1227    return;
1228}
1229
1230/*===========================================================================
1231 * FUNCTION   : dump
1232 *
1233 * DESCRIPTION:
1234 *
1235 * PARAMETERS :
1236 *
1237 *
1238 * RETURN     :
1239 *==========================================================================*/
1240void QCamera3HardwareInterface::dump(int /*fd*/)
1241{
1242    /*Enable lock when we implement this function*/
1243    /*
1244    pthread_mutex_lock(&mMutex);
1245
1246    pthread_mutex_unlock(&mMutex);
1247    */
1248    return;
1249}
1250
1251/*===========================================================================
1252 * FUNCTION   : flush
1253 *
1254 * DESCRIPTION:
1255 *
1256 * PARAMETERS :
1257 *
1258 *
1259 * RETURN     :
1260 *==========================================================================*/
1261int QCamera3HardwareInterface::flush()
1262{
1263    /*Enable lock when we implement this function*/
1264    /*
1265    pthread_mutex_lock(&mMutex);
1266
1267    pthread_mutex_unlock(&mMutex);
1268    */
1269    return 0;
1270}
1271
1272/*===========================================================================
1273 * FUNCTION   : captureResultCb
1274 *
1275 * DESCRIPTION: Callback handler for all capture result
1276 *              (streams, as well as metadata)
1277 *
1278 * PARAMETERS :
1279 *   @metadata : metadata information
1280 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1281 *               NULL if metadata.
1282 *
1283 * RETURN     : NONE
1284 *==========================================================================*/
1285void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1286                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1287{
1288    pthread_mutex_lock(&mMutex);
1289
1290    if (metadata_buf) {
1291        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1292        int32_t frame_number_valid = *(int32_t *)
1293            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1294        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1295            CAM_INTF_META_PENDING_REQUESTS, metadata);
1296        uint32_t frame_number = *(uint32_t *)
1297            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1298        const struct timeval *tv = (const struct timeval *)
1299            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1300        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1301            tv->tv_usec * NSEC_PER_USEC;
1302
1303        if (!frame_number_valid) {
1304            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1305            mMetadataChannel->bufDone(metadata_buf);
1306            goto done_metadata;
1307        }
1308        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1309                frame_number, capture_time);
1310
1311        // Go through the pending requests info and send shutter/results to frameworks
1312        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1313                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1314            camera3_capture_result_t result;
1315            camera3_notify_msg_t notify_msg;
1316            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1317
1318            // Flush out all entries with less or equal frame numbers.
1319
1320            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1321            //Right now it's the same as metadata timestamp
1322
1323            //TODO: When there is metadata drop, how do we derive the timestamp of
1324            //dropped frames? For now, we fake the dropped timestamp by substracting
1325            //from the reported timestamp
1326            nsecs_t current_capture_time = capture_time -
1327                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1328
1329            // Send shutter notify to frameworks
1330            notify_msg.type = CAMERA3_MSG_SHUTTER;
1331            notify_msg.message.shutter.frame_number = i->frame_number;
1332            notify_msg.message.shutter.timestamp = current_capture_time;
1333            mCallbackOps->notify(mCallbackOps, &notify_msg);
1334            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1335                    i->frame_number, capture_time);
1336
1337            // Send empty metadata with already filled buffers for dropped metadata
1338            // and send valid metadata with already filled buffers for current metadata
1339            if (i->frame_number < frame_number) {
1340                CameraMetadata dummyMetadata;
1341                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1342                        &current_capture_time, 1);
1343                dummyMetadata.update(ANDROID_REQUEST_ID,
1344                        &(i->request_id), 1);
1345                result.result = dummyMetadata.release();
1346            } else {
1347                result.result = translateCbMetadataToResultMetadata(metadata,
1348                        current_capture_time, i->request_id);
1349                if (mIsZslMode) {
1350                   int found_metadata = 0;
1351                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1352                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1353                        j != i->buffers.end(); j++) {
1354                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1355                         //check if corresp. zsl already exists in the stored metadata list
1356                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1357                               m != mStoredMetadataList.begin(); m++) {
1358                            if (m->frame_number == frame_number) {
1359                               m->meta_buf = metadata_buf;
1360                               found_metadata = 1;
1361                               break;
1362                            }
1363                         }
1364                         if (!found_metadata) {
1365                            MetadataBufferInfo store_meta_info;
1366                            store_meta_info.meta_buf = metadata_buf;
1367                            store_meta_info.frame_number = frame_number;
1368                            mStoredMetadataList.push_back(store_meta_info);
1369                            found_metadata = 1;
1370                         }
1371                      }
1372                   }
1373                   if (!found_metadata) {
1374                       if (!i->input_buffer_present && i->blob_request) {
1375                          //livesnapshot or fallback non-zsl snapshot case
1376                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1377                                j != i->buffers.end(); j++){
1378                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1379                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1380                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1381                                 break;
1382                              }
1383                         }
1384                       } else {
1385                            //return the metadata immediately
1386                            mMetadataChannel->bufDone(metadata_buf);
1387                            free(metadata_buf);
1388                       }
1389                   }
1390               } else if (!mIsZslMode && i->blob_request) {
1391                   //If it is a blob request then send the metadata to the picture channel
1392                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1393               } else {
1394                   // Return metadata buffer
1395                   mMetadataChannel->bufDone(metadata_buf);
1396                   free(metadata_buf);
1397               }
1398
1399            }
1400            if (!result.result) {
1401                ALOGE("%s: metadata is NULL", __func__);
1402            }
1403            result.frame_number = i->frame_number;
1404            result.num_output_buffers = 0;
1405            result.output_buffers = NULL;
1406            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1407                    j != i->buffers.end(); j++) {
1408                if (j->buffer) {
1409                    result.num_output_buffers++;
1410                }
1411            }
1412
1413            if (result.num_output_buffers > 0) {
1414                camera3_stream_buffer_t *result_buffers =
1415                    new camera3_stream_buffer_t[result.num_output_buffers];
1416                if (!result_buffers) {
1417                    ALOGE("%s: Fatal error: out of memory", __func__);
1418                }
1419                size_t result_buffers_idx = 0;
1420                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1421                        j != i->buffers.end(); j++) {
1422                    if (j->buffer) {
1423                        result_buffers[result_buffers_idx++] = *(j->buffer);
1424                        free(j->buffer);
1425                        j->buffer = NULL;
1426                        mPendingBuffersMap.editValueFor(j->stream)--;
1427                    }
1428                }
1429                result.output_buffers = result_buffers;
1430
1431                mCallbackOps->process_capture_result(mCallbackOps, &result);
1432                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1433                        __func__, result.frame_number, current_capture_time);
1434                free_camera_metadata((camera_metadata_t *)result.result);
1435                delete[] result_buffers;
1436            } else {
1437                mCallbackOps->process_capture_result(mCallbackOps, &result);
1438                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1439                        __func__, result.frame_number, current_capture_time);
1440                free_camera_metadata((camera_metadata_t *)result.result);
1441            }
1442            // erase the element from the list
1443            i = mPendingRequestsList.erase(i);
1444        }
1445
1446
1447done_metadata:
1448        bool max_buffers_dequeued = false;
1449        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1450            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1451            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1452            if (queued_buffers == stream->max_buffers) {
1453                max_buffers_dequeued = true;
1454                break;
1455            }
1456        }
1457        if (!max_buffers_dequeued && !pending_requests) {
1458            // Unblock process_capture_request
1459            mPendingRequest = 0;
1460            pthread_cond_signal(&mRequestCond);
1461        }
1462    } else {
1463        // If the frame number doesn't exist in the pending request list,
1464        // directly send the buffer to the frameworks, and update pending buffers map
1465        // Otherwise, book-keep the buffer.
1466        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1467        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1468            i++;
1469        }
1470        if (i == mPendingRequestsList.end()) {
1471            // Verify all pending requests frame_numbers are greater
1472            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1473                    j != mPendingRequestsList.end(); j++) {
1474                if (j->frame_number < frame_number) {
1475                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1476                            __func__, j->frame_number, frame_number);
1477                }
1478            }
1479            camera3_capture_result_t result;
1480            result.result = NULL;
1481            result.frame_number = frame_number;
1482            result.num_output_buffers = 1;
1483            result.output_buffers = buffer;
1484            ALOGV("%s: result frame_number = %d, buffer = %p",
1485                    __func__, frame_number, buffer);
1486            mPendingBuffersMap.editValueFor(buffer->stream)--;
1487            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1488                int found = 0;
1489                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1490                      k != mStoredMetadataList.end(); k++) {
1491                    if (k->frame_number == frame_number) {
1492                        k->zsl_buf_hdl = buffer->buffer;
1493                        found = 1;
1494                        break;
1495                    }
1496                }
1497                if (!found) {
1498                   MetadataBufferInfo meta_info;
1499                   meta_info.frame_number = frame_number;
1500                   meta_info.zsl_buf_hdl = buffer->buffer;
1501                   mStoredMetadataList.push_back(meta_info);
1502                }
1503            }
1504            mCallbackOps->process_capture_result(mCallbackOps, &result);
1505        } else {
1506            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1507                    j != i->buffers.end(); j++) {
1508                if (j->stream == buffer->stream) {
1509                    if (j->buffer != NULL) {
1510                        ALOGE("%s: Error: buffer is already set", __func__);
1511                    } else {
1512                        j->buffer = (camera3_stream_buffer_t *)malloc(
1513                                sizeof(camera3_stream_buffer_t));
1514                        *(j->buffer) = *buffer;
1515                        ALOGV("%s: cache buffer %p at result frame_number %d",
1516                                __func__, buffer, frame_number);
1517                    }
1518                }
1519            }
1520        }
1521    }
1522    pthread_mutex_unlock(&mMutex);
1523    return;
1524}
1525
1526/*===========================================================================
1527 * FUNCTION   : translateCbMetadataToResultMetadata
1528 *
1529 * DESCRIPTION:
1530 *
1531 * PARAMETERS :
1532 *   @metadata : metadata information from callback
1533 *
1534 * RETURN     : camera_metadata_t*
1535 *              metadata in a format specified by fwk
1536 *==========================================================================*/
1537camera_metadata_t*
1538QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1539                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1540                                 int32_t request_id)
1541{
1542    CameraMetadata camMetadata;
1543    camera_metadata_t* resultMetadata;
1544
1545    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1546    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1547
1548    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1549    uint8_t next_entry;
1550    while (curr_entry != CAM_INTF_PARM_MAX) {
1551       ALOGV("%s: META_DEBUG: cur_entry is %d", __func__, curr_entry);
1552       switch (curr_entry) {
1553         case CAM_INTF_META_FACE_DETECTION:{
1554             cam_face_detection_data_t *faceDetectionInfo =
1555                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1556             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1557             int32_t faceIds[numFaces];
1558             uint8_t faceScores[numFaces];
1559             int32_t faceRectangles[numFaces * 4];
1560             int32_t faceLandmarks[numFaces * 6];
1561             int j = 0, k = 0;
1562             for (int i = 0; i < numFaces; i++) {
1563                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1564                 faceScores[i] = faceDetectionInfo->faces[i].score;
1565                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1566                         faceRectangles+j, -1);
1567                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1568                 j+= 4;
1569                 k+= 6;
1570             }
1571             if (numFaces > 0) {
1572                 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1573                 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1574                 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1575                     faceRectangles, numFaces*4);
1576                 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1577                     faceLandmarks, numFaces*6);
1578             }
1579            break;
1580            }
1581         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1582             uint8_t  *color_correct_mode =
1583                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1584             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1585             break;
1586          }
1587         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1588             int32_t  *ae_precapture_id =
1589                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1590             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1591             break;
1592          }
1593         case CAM_INTF_META_AEC_ROI: {
1594            cam_area_t  *hAeRegions =
1595                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1596             int32_t aeRegions[5];
1597             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1598             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1599             break;
1600          }
1601          case CAM_INTF_META_AEC_STATE:{
1602             uint8_t *ae_state =
1603                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1604             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1605             break;
1606          }
1607          case CAM_INTF_PARM_FOCUS_MODE:{
1608             uint8_t  *focusMode =
1609                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1610             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1611                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1612             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1613             break;
1614          }
1615          case CAM_INTF_META_AF_ROI:{
1616             /*af regions*/
1617             cam_area_t  *hAfRegions =
1618                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1619             int32_t afRegions[5];
1620             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1621             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1622             break;
1623          }
1624          case CAM_INTF_META_AF_STATE: {
1625             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1626             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1627             break;
1628          }
1629          case CAM_INTF_META_AF_TRIGGER_ID: {
1630             int32_t  *afTriggerId =
1631                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1632             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1633             break;
1634          }
1635          case CAM_INTF_PARM_WHITE_BALANCE: {
1636               uint8_t  *whiteBalance =
1637                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1638               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1639                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1640                   *whiteBalance);
1641               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1642               break;
1643          }
1644          case CAM_INTF_META_AWB_REGIONS: {
1645             /*awb regions*/
1646             cam_area_t  *hAwbRegions =
1647                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1648             int32_t awbRegions[5];
1649             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1650             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1651             break;
1652          }
1653          case CAM_INTF_META_AWB_STATE: {
1654             uint8_t  *whiteBalanceState =
1655                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1656             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1657             break;
1658          }
1659          case CAM_INTF_META_MODE: {
1660             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1661             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1662             break;
1663          }
1664          case CAM_INTF_META_EDGE_MODE: {
1665             uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1666             camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1667             break;
1668          }
1669          case CAM_INTF_META_FLASH_POWER: {
1670             uint8_t  *flashPower =
1671                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1672             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1673             break;
1674          }
1675          case CAM_INTF_META_FLASH_FIRING_TIME: {
1676             int64_t  *flashFiringTime =
1677                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1678             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1679             break;
1680          }
1681          case CAM_INTF_META_FLASH_STATE: {
1682             uint8_t  *flashState =
1683                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1684             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1685             break;
1686          }
1687          case CAM_INTF_META_HOTPIXEL_MODE: {
1688              uint8_t  *hotPixelMode =
1689                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1690              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1691              break;
1692          }
1693          case CAM_INTF_META_LENS_APERTURE:{
1694             float  *lensAperture =
1695                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1696             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1697             break;
1698          }
1699          case CAM_INTF_META_LENS_FILTERDENSITY: {
1700             float  *filterDensity =
1701                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1702             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1703             break;
1704          }
1705          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1706             float  *focalLength =
1707                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1708             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1709             break;
1710          }
1711          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1712             float  *focusDistance =
1713                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1714             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1715             break;
1716          }
1717          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1718             float  *focusRange =
1719                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1720             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1721          }
1722          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1723             uint8_t  *opticalStab =
1724                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1725             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1726          }
1727          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1728             uint8_t  *noiseRedMode =
1729                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1730             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1731             break;
1732          }
1733          case CAM_INTF_META_SCALER_CROP_REGION: {
1734             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1735             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1736             int32_t scalerCropRegion[4];
1737             scalerCropRegion[0] = hScalerCropRegion->left;
1738             scalerCropRegion[1] = hScalerCropRegion->top;
1739             scalerCropRegion[2] = hScalerCropRegion->width;
1740             scalerCropRegion[3] = hScalerCropRegion->height;
1741             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1742             break;
1743          }
1744          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1745             int64_t  *sensorExpTime =
1746                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1747             mMetadataResponse.exposure_time = *sensorExpTime;
1748             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1749             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1750             break;
1751          }
1752          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1753             int64_t  *sensorFameDuration =
1754                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1755             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1756             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1757             break;
1758          }
1759          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1760             int32_t  *sensorSensitivity =
1761                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1762             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1763             mMetadataResponse.iso_speed = *sensorSensitivity;
1764             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1765             break;
1766          }
1767          case CAM_INTF_META_SHADING_MODE: {
1768             uint8_t  *shadingMode =
1769                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1770             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1771             break;
1772          }
1773          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1774             uint8_t  *faceDetectMode =
1775                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1776             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1777                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1778                                                        *faceDetectMode);
1779             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1780             break;
1781          }
1782          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1783             uint8_t  *histogramMode =
1784                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1785             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1786             break;
1787          }
1788          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1789               uint8_t  *sharpnessMapMode =
1790                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1791               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1792                                  sharpnessMapMode, 1);
1793               break;
1794           }
1795          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1796               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1797               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1798               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1799                                  (int32_t*)sharpnessMap->sharpness,
1800                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1801               break;
1802          }
1803          case CAM_INTF_META_LENS_SHADING_MAP: {
1804               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1805               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1806               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1807               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1808               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1809                                  (float*)lensShadingMap->lens_shading,
1810                                  4*map_width*map_height);
1811               break;
1812          }
1813          case CAM_INTF_META_TONEMAP_CURVES:{
1814             //Populate CAM_INTF_META_TONEMAP_CURVES
1815             /* ch0 = G, ch 1 = B, ch 2 = R*/
1816             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1817             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1818             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1819                                (float*)tonemap->curves[0].tonemap_points,
1820                                tonemap->tonemap_points_cnt * 2);
1821
1822             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1823                                (float*)tonemap->curves[1].tonemap_points,
1824                                tonemap->tonemap_points_cnt * 2);
1825
1826             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1827                                (float*)tonemap->curves[2].tonemap_points,
1828                                tonemap->tonemap_points_cnt * 2);
1829             break;
1830          }
1831          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1832             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1833             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1834             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1835             break;
1836          }
1837          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1838              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1839              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1840              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1841                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1842              break;
1843          }
1844          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1845             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1846             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1847             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1848                       predColorCorrectionGains->gains, 4);
1849             break;
1850          }
1851          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1852             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1853                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1854             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1855                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1856             break;
1857
1858          }
1859          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
1860             uint8_t *blackLevelLock = (uint8_t*)
1861               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1862             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1863             break;
1864          }
1865          case CAM_INTF_META_SCENE_FLICKER:{
1866             uint8_t *sceneFlicker = (uint8_t*)
1867             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1868             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1869             break;
1870          }
1871          case CAM_INTF_PARM_LED_MODE:
1872             break;
1873          default:
1874             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
1875                   __func__, curr_entry);
1876             break;
1877       }
1878       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
1879       curr_entry = next_entry;
1880    }
1881    resultMetadata = camMetadata.release();
1882    return resultMetadata;
1883}
1884
1885/*===========================================================================
1886 * FUNCTION   : convertToRegions
1887 *
1888 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1889 *
1890 * PARAMETERS :
1891 *   @rect   : cam_rect_t struct to convert
1892 *   @region : int32_t destination array
1893 *   @weight : if we are converting from cam_area_t, weight is valid
1894 *             else weight = -1
1895 *
1896 *==========================================================================*/
1897void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1898    region[0] = rect.left;
1899    region[1] = rect.top;
1900    region[2] = rect.left + rect.width;
1901    region[3] = rect.top + rect.height;
1902    if (weight > -1) {
1903        region[4] = weight;
1904    }
1905}
1906
1907/*===========================================================================
1908 * FUNCTION   : convertFromRegions
1909 *
1910 * DESCRIPTION: helper method to convert from array to cam_rect_t
1911 *
1912 * PARAMETERS :
1913 *   @rect   : cam_rect_t struct to convert
1914 *   @region : int32_t destination array
1915 *   @weight : if we are converting from cam_area_t, weight is valid
1916 *             else weight = -1
1917 *
1918 *==========================================================================*/
1919void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1920                                                   const camera_metadata_t *settings,
1921                                                   uint32_t tag){
1922    CameraMetadata frame_settings;
1923    frame_settings = settings;
1924    int32_t x_min = frame_settings.find(tag).data.i32[0];
1925    int32_t y_min = frame_settings.find(tag).data.i32[1];
1926    int32_t x_max = frame_settings.find(tag).data.i32[2];
1927    int32_t y_max = frame_settings.find(tag).data.i32[3];
1928    roi->weight = frame_settings.find(tag).data.i32[4];
1929    roi->rect.left = x_min;
1930    roi->rect.top = y_min;
1931    roi->rect.width = x_max - x_min;
1932    roi->rect.height = y_max - y_min;
1933}
1934
1935/*===========================================================================
1936 * FUNCTION   : resetIfNeededROI
1937 *
1938 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1939 *              crop region
1940 *
1941 * PARAMETERS :
1942 *   @roi       : cam_area_t struct to resize
1943 *   @scalerCropRegion : cam_crop_region_t region to compare against
1944 *
1945 *
1946 *==========================================================================*/
1947bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1948                                                 const cam_crop_region_t* scalerCropRegion)
1949{
1950    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1951    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1952    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1953    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1954    if ((roi_x_max < scalerCropRegion->left) ||
1955        (roi_y_max < scalerCropRegion->top)  ||
1956        (roi->rect.left > crop_x_max) ||
1957        (roi->rect.top > crop_y_max)){
1958        return false;
1959    }
1960    if (roi->rect.left < scalerCropRegion->left) {
1961        roi->rect.left = scalerCropRegion->left;
1962    }
1963    if (roi->rect.top < scalerCropRegion->top) {
1964        roi->rect.top = scalerCropRegion->top;
1965    }
1966    if (roi_x_max > crop_x_max) {
1967        roi_x_max = crop_x_max;
1968    }
1969    if (roi_y_max > crop_y_max) {
1970        roi_y_max = crop_y_max;
1971    }
1972    roi->rect.width = roi_x_max - roi->rect.left;
1973    roi->rect.height = roi_y_max - roi->rect.top;
1974    return true;
1975}
1976
1977/*===========================================================================
1978 * FUNCTION   : convertLandmarks
1979 *
1980 * DESCRIPTION: helper method to extract the landmarks from face detection info
1981 *
1982 * PARAMETERS :
1983 *   @face   : cam_rect_t struct to convert
1984 *   @landmarks : int32_t destination array
1985 *
1986 *
1987 *==========================================================================*/
1988void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1989{
1990    landmarks[0] = face.left_eye_center.x;
1991    landmarks[1] = face.left_eye_center.y;
1992    landmarks[2] = face.right_eye_center.y;
1993    landmarks[3] = face.right_eye_center.y;
1994    landmarks[4] = face.mouth_center.x;
1995    landmarks[5] = face.mouth_center.y;
1996}
1997
1998#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1999/*===========================================================================
2000 * FUNCTION   : initCapabilities
2001 *
2002 * DESCRIPTION: initialize camera capabilities in static data struct
2003 *
2004 * PARAMETERS :
2005 *   @cameraId  : camera Id
2006 *
2007 * RETURN     : int32_t type of status
2008 *              NO_ERROR  -- success
2009 *              none-zero failure code
2010 *==========================================================================*/
2011int QCamera3HardwareInterface::initCapabilities(int cameraId)
2012{
2013    int rc = 0;
2014    mm_camera_vtbl_t *cameraHandle = NULL;
2015    QCamera3HeapMemory *capabilityHeap = NULL;
2016
2017    cameraHandle = camera_open(cameraId);
2018    if (!cameraHandle) {
2019        ALOGE("%s: camera_open failed", __func__);
2020        rc = -1;
2021        goto open_failed;
2022    }
2023
2024    capabilityHeap = new QCamera3HeapMemory();
2025    if (capabilityHeap == NULL) {
2026        ALOGE("%s: creation of capabilityHeap failed", __func__);
2027        goto heap_creation_failed;
2028    }
2029    /* Allocate memory for capability buffer */
2030    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2031    if(rc != OK) {
2032        ALOGE("%s: No memory for cappability", __func__);
2033        goto allocate_failed;
2034    }
2035
2036    /* Map memory for capability buffer */
2037    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2038    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2039                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2040                                capabilityHeap->getFd(0),
2041                                sizeof(cam_capability_t));
2042    if(rc < 0) {
2043        ALOGE("%s: failed to map capability buffer", __func__);
2044        goto map_failed;
2045    }
2046
2047    /* Query Capability */
2048    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2049    if(rc < 0) {
2050        ALOGE("%s: failed to query capability",__func__);
2051        goto query_failed;
2052    }
2053    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2054    if (!gCamCapability[cameraId]) {
2055        ALOGE("%s: out of memory", __func__);
2056        goto query_failed;
2057    }
2058    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2059                                        sizeof(cam_capability_t));
2060    rc = 0;
2061
2062query_failed:
2063    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2064                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2065map_failed:
2066    capabilityHeap->deallocate();
2067allocate_failed:
2068    delete capabilityHeap;
2069heap_creation_failed:
2070    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2071    cameraHandle = NULL;
2072open_failed:
2073    return rc;
2074}
2075
2076/*===========================================================================
2077 * FUNCTION   : initParameters
2078 *
2079 * DESCRIPTION: initialize camera parameters
2080 *
2081 * PARAMETERS :
2082 *
2083 * RETURN     : int32_t type of status
2084 *              NO_ERROR  -- success
2085 *              none-zero failure code
2086 *==========================================================================*/
2087int QCamera3HardwareInterface::initParameters()
2088{
2089    int rc = 0;
2090
2091    //Allocate Set Param Buffer
2092    mParamHeap = new QCamera3HeapMemory();
2093    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2094    if(rc != OK) {
2095        rc = NO_MEMORY;
2096        ALOGE("Failed to allocate SETPARM Heap memory");
2097        delete mParamHeap;
2098        mParamHeap = NULL;
2099        return rc;
2100    }
2101
2102    //Map memory for parameters buffer
2103    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2104            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2105            mParamHeap->getFd(0),
2106            sizeof(parm_buffer_t));
2107    if(rc < 0) {
2108        ALOGE("%s:failed to map SETPARM buffer",__func__);
2109        rc = FAILED_TRANSACTION;
2110        mParamHeap->deallocate();
2111        delete mParamHeap;
2112        mParamHeap = NULL;
2113        return rc;
2114    }
2115
2116    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2117    return rc;
2118}
2119
2120/*===========================================================================
2121 * FUNCTION   : deinitParameters
2122 *
2123 * DESCRIPTION: de-initialize camera parameters
2124 *
2125 * PARAMETERS :
2126 *
2127 * RETURN     : NONE
2128 *==========================================================================*/
2129void QCamera3HardwareInterface::deinitParameters()
2130{
2131    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2132            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2133
2134    mParamHeap->deallocate();
2135    delete mParamHeap;
2136    mParamHeap = NULL;
2137
2138    mParameters = NULL;
2139}
2140
2141/*===========================================================================
2142 * FUNCTION   : calcMaxJpegSize
2143 *
2144 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2145 *
2146 * PARAMETERS :
2147 *
2148 * RETURN     : max_jpeg_size
2149 *==========================================================================*/
2150int QCamera3HardwareInterface::calcMaxJpegSize()
2151{
2152    int32_t max_jpeg_size = 0;
2153    int temp_width, temp_height;
2154    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2155        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2156        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2157        if (temp_width * temp_height > max_jpeg_size ) {
2158            max_jpeg_size = temp_width * temp_height;
2159        }
2160    }
2161    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2162    return max_jpeg_size;
2163}
2164
2165/*===========================================================================
2166 * FUNCTION   : initStaticMetadata
2167 *
2168 * DESCRIPTION: initialize the static metadata
2169 *
2170 * PARAMETERS :
2171 *   @cameraId  : camera Id
2172 *
2173 * RETURN     : int32_t type of status
2174 *              0  -- success
2175 *              non-zero failure code
2176 *==========================================================================*/
2177int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2178{
2179    int rc = 0;
2180    CameraMetadata staticInfo;
2181
2182    /* android.info: hardware level */
2183    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2184    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2185        &supportedHardwareLevel, 1);
2186
2187    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2188    /*HAL 3 only*/
2189    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2190                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2191
2192    /*hard coded for now but this should come from sensor*/
2193    float min_focus_distance;
2194    if(facingBack){
2195        min_focus_distance = 10;
2196    } else {
2197        min_focus_distance = 0;
2198    }
2199    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2200                    &min_focus_distance, 1);
2201
2202    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2203                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2204
2205    /*should be using focal lengths but sensor doesn't provide that info now*/
2206    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2207                      &gCamCapability[cameraId]->focal_length,
2208                      1);
2209
2210    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2211                      gCamCapability[cameraId]->apertures,
2212                      gCamCapability[cameraId]->apertures_count);
2213
2214    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2215                gCamCapability[cameraId]->filter_densities,
2216                gCamCapability[cameraId]->filter_densities_count);
2217
2218
2219    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2220                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2221                      gCamCapability[cameraId]->optical_stab_modes_count);
2222
2223    staticInfo.update(ANDROID_LENS_POSITION,
2224                      gCamCapability[cameraId]->lens_position,
2225                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2226
2227    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2228                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2229    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2230                      lens_shading_map_size,
2231                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2232
2233    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2234                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2235    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2236            geo_correction_map_size,
2237            sizeof(geo_correction_map_size)/sizeof(int32_t));
2238
2239    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2240                       gCamCapability[cameraId]->geo_correction_map,
2241                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2242
2243    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2244            gCamCapability[cameraId]->sensor_physical_size, 2);
2245
2246    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2247            gCamCapability[cameraId]->exposure_time_range, 2);
2248
2249    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2250            &gCamCapability[cameraId]->max_frame_duration, 1);
2251
2252
2253    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2254                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2255
2256    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2257                                               gCamCapability[cameraId]->pixel_array_size.height};
2258    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2259                      pixel_array_size, 2);
2260
2261    int32_t active_array_size[] = {0, 0,
2262                                                gCamCapability[cameraId]->active_array_size.width,
2263                                                gCamCapability[cameraId]->active_array_size.height};
2264    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2265                      active_array_size, 4);
2266
2267    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2268            &gCamCapability[cameraId]->white_level, 1);
2269
2270    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2271            gCamCapability[cameraId]->black_level_pattern, 4);
2272
2273    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2274                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2275
2276    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2277                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2278
2279    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2280                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2281
2282    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2283                      &gCamCapability[cameraId]->histogram_size, 1);
2284
2285    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2286            &gCamCapability[cameraId]->max_histogram_count, 1);
2287
2288    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2289                                                gCamCapability[cameraId]->sharpness_map_size.height};
2290
2291    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2292            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2293
2294    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2295            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2296
2297
2298    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2299                      &gCamCapability[cameraId]->raw_min_duration,
2300                       1);
2301
2302    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2303                                                HAL_PIXEL_FORMAT_BLOB};
2304    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2305    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2306                      scalar_formats,
2307                      scalar_formats_count);
2308
2309    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2310    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2311              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2312              available_processed_sizes);
2313    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2314                available_processed_sizes,
2315                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2316
2317    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2318                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2319                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2320
2321    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2322    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2323                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2324                 available_fps_ranges);
2325    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2326            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2327
2328    camera_metadata_rational exposureCompensationStep = {
2329            gCamCapability[cameraId]->exp_compensation_step.numerator,
2330            gCamCapability[cameraId]->exp_compensation_step.denominator};
2331    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2332                      &exposureCompensationStep, 1);
2333
2334    /*TO DO*/
2335    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2336    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2337                      availableVstabModes, sizeof(availableVstabModes));
2338
2339    /*HAL 1 and HAL 3 common*/
2340    float maxZoom = 4;
2341    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2342            &maxZoom, 1);
2343
2344    int32_t max3aRegions = 1;
2345    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2346            &max3aRegions, 1);
2347
2348    uint8_t availableFaceDetectModes[] = {
2349            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2350            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2351    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2352                      availableFaceDetectModes,
2353                      sizeof(availableFaceDetectModes));
2354
2355    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2356                                       gCamCapability[cameraId]->raw_dim.height};
2357    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2358                      raw_size,
2359                      sizeof(raw_size)/sizeof(uint32_t));
2360
2361    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2362                                                        gCamCapability[cameraId]->exposure_compensation_max};
2363    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2364            exposureCompensationRange,
2365            sizeof(exposureCompensationRange)/sizeof(int32_t));
2366
2367    uint8_t lensFacing = (facingBack) ?
2368            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2369    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2370
2371    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2372                available_processed_sizes,
2373                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2374
2375    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2376                      available_thumbnail_sizes,
2377                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2378
2379    int32_t max_jpeg_size = 0;
2380    int temp_width, temp_height;
2381    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2382        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2383        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2384        if (temp_width * temp_height > max_jpeg_size ) {
2385            max_jpeg_size = temp_width * temp_height;
2386        }
2387    }
2388    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2389    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2390                      &max_jpeg_size, 1);
2391
2392    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2393    int32_t size = 0;
2394    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2395        int val = lookupFwkName(EFFECT_MODES_MAP,
2396                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2397                                   gCamCapability[cameraId]->supported_effects[i]);
2398        if (val != NAME_NOT_FOUND) {
2399            avail_effects[size] = (uint8_t)val;
2400            size++;
2401        }
2402    }
2403    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2404                      avail_effects,
2405                      size);
2406
2407    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2408    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2409    int32_t supported_scene_modes_cnt = 0;
2410    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2411        int val = lookupFwkName(SCENE_MODES_MAP,
2412                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2413                                gCamCapability[cameraId]->supported_scene_modes[i]);
2414        if (val != NAME_NOT_FOUND) {
2415            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2416            supported_indexes[supported_scene_modes_cnt] = i;
2417            supported_scene_modes_cnt++;
2418        }
2419    }
2420
2421    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2422                      avail_scene_modes,
2423                      supported_scene_modes_cnt);
2424
2425    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2426    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2427                      supported_scene_modes_cnt,
2428                      scene_mode_overrides,
2429                      supported_indexes,
2430                      cameraId);
2431    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2432                      scene_mode_overrides,
2433                      supported_scene_modes_cnt*3);
2434
2435    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2436    size = 0;
2437    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2438        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2439                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2440                                 gCamCapability[cameraId]->supported_antibandings[i]);
2441        if (val != NAME_NOT_FOUND) {
2442            avail_antibanding_modes[size] = (uint8_t)val;
2443            size++;
2444        }
2445
2446    }
2447    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2448                      avail_antibanding_modes,
2449                      size);
2450
2451    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2452    size = 0;
2453    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2454        int val = lookupFwkName(FOCUS_MODES_MAP,
2455                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2456                                gCamCapability[cameraId]->supported_focus_modes[i]);
2457        if (val != NAME_NOT_FOUND) {
2458            avail_af_modes[size] = (uint8_t)val;
2459            size++;
2460        }
2461    }
2462    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2463                      avail_af_modes,
2464                      size);
2465
2466    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2467    size = 0;
2468    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2469        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2470                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2471                                    gCamCapability[cameraId]->supported_white_balances[i]);
2472        if (val != NAME_NOT_FOUND) {
2473            avail_awb_modes[size] = (uint8_t)val;
2474            size++;
2475        }
2476    }
2477    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2478                      avail_awb_modes,
2479                      size);
2480
2481    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2482    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2483      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2484
2485    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2486            available_flash_levels,
2487            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2488
2489
2490    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2491    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2492            &flashAvailable, 1);
2493
2494    uint8_t avail_ae_modes[5];
2495    size = 0;
2496    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2497        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2498        size++;
2499    }
2500    if (flashAvailable) {
2501        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2502        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2503        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2504    }
2505    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2506                      avail_ae_modes,
2507                      size);
2508
2509    int32_t sensitivity_range[2];
2510    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2511    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2512    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2513                      sensitivity_range,
2514                      sizeof(sensitivity_range) / sizeof(int32_t));
2515
2516    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2517                      &gCamCapability[cameraId]->max_analog_sensitivity,
2518                      1);
2519
2520    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2521                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2522                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2523
2524    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2525    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2526                      &sensor_orientation,
2527                      1);
2528
2529    int32_t max_output_streams[3] = {1, 3, 1};
2530    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2531                      max_output_streams,
2532                      3);
2533
2534    gStaticMetadata[cameraId] = staticInfo.release();
2535    return rc;
2536}
2537
2538/*===========================================================================
2539 * FUNCTION   : makeTable
2540 *
2541 * DESCRIPTION: make a table of sizes
2542 *
2543 * PARAMETERS :
2544 *
2545 *
2546 *==========================================================================*/
2547void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2548                                          int32_t* sizeTable)
2549{
2550    int j = 0;
2551    for (int i = 0; i < size; i++) {
2552        sizeTable[j] = dimTable[i].width;
2553        sizeTable[j+1] = dimTable[i].height;
2554        j+=2;
2555    }
2556}
2557
2558/*===========================================================================
2559 * FUNCTION   : makeFPSTable
2560 *
2561 * DESCRIPTION: make a table of fps ranges
2562 *
2563 * PARAMETERS :
2564 *
2565 *==========================================================================*/
2566void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2567                                          int32_t* fpsRangesTable)
2568{
2569    int j = 0;
2570    for (int i = 0; i < size; i++) {
2571        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2572        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2573        j+=2;
2574    }
2575}
2576
2577/*===========================================================================
2578 * FUNCTION   : makeOverridesList
2579 *
2580 * DESCRIPTION: make a list of scene mode overrides
2581 *
2582 * PARAMETERS :
2583 *
2584 *
2585 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
                                                  uint8_t size, uint8_t* overridesList,
                                                  uint8_t* supported_indexes,
                                                  int camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Each output entry is a triple {AE mode, AWB mode, AF mode}, so
    // overridesList must hold at least 3*size bytes. supported_indexes maps
    // each framework-supported scene mode back to its backend table slot.
    int j = 0, index = 0, supt = 0;
    uint8_t focus_override;
    for (int i = 0; i < size; i++) {
        supt = 0;
        index = supported_indexes[i];
        // AE override: prefer auto-flash when the camera has a flash unit.
        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the backend enum to the framework enum.
        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                                    overridesTable[index].awb_mode);
        focus_override = (uint8_t)overridesTable[index].af_mode;
        // Only publish the AF override if the sensor actually supports that
        // focus mode; otherwise fall back to AF_MODE_OFF.
        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = 1;
              break;
           }
        }
        if (supt) {
           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                              focus_override);
        } else {
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
2620
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the backend format to a type recognized by the framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
 *
 *==========================================================================*/
2631int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2632{
2633    int32_t halPixelFormat;
2634
2635    switch (format) {
2636    case CAM_FORMAT_YUV_420_NV12:
2637        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2638        break;
2639    case CAM_FORMAT_YUV_420_NV21:
2640        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2641        break;
2642    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2643        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2644        break;
2645    case CAM_FORMAT_YUV_420_YV12:
2646        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2647        break;
2648    case CAM_FORMAT_YUV_422_NV16:
2649    case CAM_FORMAT_YUV_422_NV61:
2650    default:
2651        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2652        break;
2653    }
2654    return halPixelFormat;
2655}
2656
/*===========================================================================
 * FUNCTION   : getSensorSensitivity
 *
 * DESCRIPTION: convert iso_mode to an integer value
 *
 * PARAMETERS : iso_mode : the iso_mode supported by sensor
 *
 * RETURN     : sensitivity supported by sensor
 *
 *==========================================================================*/
2667int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2668{
2669    int32_t sensitivity;
2670
2671    switch (iso_mode) {
2672    case CAM_ISO_MODE_100:
2673        sensitivity = 100;
2674        break;
2675    case CAM_ISO_MODE_200:
2676        sensitivity = 200;
2677        break;
2678    case CAM_ISO_MODE_400:
2679        sensitivity = 400;
2680        break;
2681    case CAM_ISO_MODE_800:
2682        sensitivity = 800;
2683        break;
2684    case CAM_ISO_MODE_1600:
2685        sensitivity = 1600;
2686        break;
2687    default:
2688        sensitivity = -1;
2689        break;
2690    }
2691    return sensitivity;
2692}
2693
2694
2695/*===========================================================================
2696 * FUNCTION   : AddSetParmEntryToBatch
2697 *
2698 * DESCRIPTION: add set parameter entry into batch
2699 *
2700 * PARAMETERS :
2701 *   @p_table     : ptr to parameter buffer
2702 *   @paramType   : parameter type
2703 *   @paramLength : length of parameter value
2704 *   @paramValue  : ptr to parameter value
2705 *
2706 * RETURN     : int32_t type of status
2707 *              NO_ERROR  -- success
 *              non-zero failure code
2709 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    // The batch buffer addresses each parameter's slot directly by its ID,
    // and threads the set of populated slots together as a singly linked
    // list of IDs kept in ascending order (via the GET/SET_*_PARAM_ID
    // macros). This function links the new ID into place, then copies the
    // value into its slot.
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
        // Already the list head; slot will simply be overwritten below.
    } else if (position < current){
        // New ID precedes the current head: make it the new first entry.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Reject values larger than one table slot to avoid overrunning it.
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2751
/*===========================================================================
 * FUNCTION   : lookupFwkName
 *
 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 *              make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @hal_name : name of the hal_parm to map
 *
 * RETURN     : int type of status
 *              fwk_name  -- success
 *              non-zero failure code
 *==========================================================================*/
2767int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2768                                             int len, int hal_name)
2769{
2770
2771    for (int i = 0; i < len; i++) {
2772        if (arr[i].hal_name == hal_name)
2773            return arr[i].fwk_name;
2774    }
2775
2776    /* Not able to find matching framework type is not necessarily
2777     * an error case. This happens when mm-camera supports more attributes
2778     * than the frameworks do */
2779    ALOGD("%s: Cannot find matching framework type", __func__);
2780    return NAME_NOT_FOUND;
2781}
2782
/*===========================================================================
 * FUNCTION   : lookupHalName
 *
 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 *              make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @fwk_name : name of the fwk_parm to map
 *
 * RETURN     : int32_t type of status
 *              hal_name  -- success
 *              non-zero failure code
 *==========================================================================*/
2798int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2799                                             int len, int fwk_name)
2800{
2801    for (int i = 0; i < len; i++) {
2802       if (arr[i].fwk_name == fwk_name)
2803           return arr[i].hal_name;
2804    }
2805    ALOGE("%s: Cannot find matching hal type", __func__);
2806    return NAME_NOT_FOUND;
2807}
2808
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
2822int QCamera3HardwareInterface::getCamInfo(int cameraId,
2823                                    struct camera_info *info)
2824{
2825    int rc = 0;
2826
2827    if (NULL == gCamCapability[cameraId]) {
2828        rc = initCapabilities(cameraId);
2829        if (rc < 0) {
2830            //pthread_mutex_unlock(&g_camlock);
2831            return rc;
2832        }
2833    }
2834
2835    if (NULL == gStaticMetadata[cameraId]) {
2836        rc = initStaticMetadata(cameraId);
2837        if (rc < 0) {
2838            return rc;
2839        }
2840    }
2841
2842    switch(gCamCapability[cameraId]->position) {
2843    case CAM_POSITION_BACK:
2844        info->facing = CAMERA_FACING_BACK;
2845        break;
2846
2847    case CAM_POSITION_FRONT:
2848        info->facing = CAMERA_FACING_FRONT;
2849        break;
2850
2851    default:
2852        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2853        rc = -1;
2854        break;
2855    }
2856
2857
2858    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2859    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2860    info->static_camera_characteristics = gStaticMetadata[cameraId];
2861
2862    return rc;
2863}
2864
2865/*===========================================================================
2866 * FUNCTION   : translateMetadata
2867 *
2868 * DESCRIPTION: translate the metadata into camera_metadata_t
2869 *
2870 * PARAMETERS : type of the request
2871 *
2872 *
2873 * RETURN     : success: camera_metadata_t*
2874 *              failure: NULL
2875 *
2876 *==========================================================================*/
2877camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2878{
2879    pthread_mutex_lock(&mMutex);
2880
2881    if (mDefaultMetadata[type] != NULL) {
2882        pthread_mutex_unlock(&mMutex);
2883        return mDefaultMetadata[type];
2884    }
2885    //first time we are handling this request
2886    //fill up the metadata structure using the wrapper class
2887    CameraMetadata settings;
2888    //translate from cam_capability_t to camera_metadata_tag_t
2889    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2890    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2891
2892    /*control*/
2893
2894    uint8_t controlIntent = 0;
2895    switch (type) {
2896      case CAMERA3_TEMPLATE_PREVIEW:
2897        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2898        break;
2899      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2900        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2901        break;
2902      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2903        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2904        break;
2905      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2906        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2907        break;
2908      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2909        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2910        break;
2911      default:
2912        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2913        break;
2914    }
2915    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2916
2917    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2918            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2919
2920    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2921    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2922
2923    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2924    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2925
2926    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2927    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2928
2929    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2930    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2931
2932    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2933    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2934
2935    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2936    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2937
2938    static uint8_t focusMode;
2939    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2940        ALOGE("%s: Setting focus mode to auto", __func__);
2941        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2942    } else {
2943        ALOGE("%s: Setting focus mode to off", __func__);
2944        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2945    }
2946    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2947
2948    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2949    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2950
2951    /*flash*/
2952    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2953    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2954
2955    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2956    settings.update(ANDROID_FLASH_FIRING_POWER,
2957            &flashFiringLevel, 1);
2958
2959    /* lens */
2960    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2961    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2962
2963    if (gCamCapability[mCameraId]->filter_densities_count) {
2964        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2965        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2966                        gCamCapability[mCameraId]->filter_densities_count);
2967    }
2968
2969    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2970    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2971
2972    /* frame duration */
2973    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2974    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2975
2976    /* sensitivity */
2977    static const int32_t default_sensitivity = 100;
2978    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2979
2980    /*edge mode*/
2981    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2982    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
2983
2984    /*noise reduction mode*/
2985    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
2986    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
2987
2988    /*color correction mode*/
2989    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
2990    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
2991
2992    /*transform matrix mode*/
2993    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
2994    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
2995
2996    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
2997    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
2998
2999    mDefaultMetadata[type] = settings.release();
3000
3001    pthread_mutex_unlock(&mMutex);
3002    return mDefaultMetadata[type];
3003}
3004
3005/*===========================================================================
3006 * FUNCTION   : setFrameParameters
3007 *
3008 * DESCRIPTION: set parameters per frame as requested in the metadata from
3009 *              framework
3010 *
3011 * PARAMETERS :
3012 *   @request   : request that needs to be serviced
3013 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3014 *
3015 * RETURN     : success: NO_ERROR
3016 *              failure:
3017 *==========================================================================*/
3018int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3019                    uint32_t streamTypeMask)
3020{
3021    /*translate from camera_metadata_t type to parm_type_t*/
3022    int rc = 0;
3023    if (request->settings == NULL && mFirstRequest) {
3024        /*settings cannot be null for the first request*/
3025        return BAD_VALUE;
3026    }
3027
3028    int32_t hal_version = CAM_HAL_V3;
3029
3030    memset(mParameters, 0, sizeof(parm_buffer_t));
3031    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3032    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3033                sizeof(hal_version), &hal_version);
3034    if (rc < 0) {
3035        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3036        return BAD_VALUE;
3037    }
3038
3039    /*we need to update the frame number in the parameters*/
3040    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3041                                sizeof(request->frame_number), &(request->frame_number));
3042    if (rc < 0) {
3043        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3044        return BAD_VALUE;
3045    }
3046
3047    /* Update stream id mask where buffers are requested */
3048    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3049                                sizeof(streamTypeMask), &streamTypeMask);
3050    if (rc < 0) {
3051        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3052        return BAD_VALUE;
3053    }
3054
3055    if(request->settings != NULL){
3056        rc = translateMetadataToParameters(request);
3057    }
3058    /*set the parameters to backend*/
3059    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3060    return rc;
3061}
3062
3063/*===========================================================================
3064 * FUNCTION   : translateMetadataToParameters
3065 *
3066 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3067 *
3068 *
3069 * PARAMETERS :
3070 *   @request  : request sent from framework
3071 *
3072 *
3073 * RETURN     : success: NO_ERROR
3074 *              failure:
3075 *==========================================================================*/
3076int QCamera3HardwareInterface::translateMetadataToParameters
3077                                  (const camera3_capture_request_t *request)
3078{
3079    int rc = 0;
3080    CameraMetadata frame_settings;
3081    frame_settings = request->settings;
3082
3083    /* Do not change the order of the following list unless you know what you are
3084     * doing.
3085     * The order is laid out in such a way that parameters in the front of the table
3086     * may be used to override the parameters later in the table. Examples are:
3087     * 1. META_MODE should precede AEC/AWB/AF MODE
3088     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3089     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3090     * 4. Any mode should precede it's corresponding settings
3091     */
3092    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3093        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3094        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3095                sizeof(metaMode), &metaMode);
3096        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3097           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3098           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3099                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3100                                             fwk_sceneMode);
3101           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3102                sizeof(sceneMode), &sceneMode);
3103        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3104           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3105           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3106                sizeof(sceneMode), &sceneMode);
3107        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3108           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3109           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3110                sizeof(sceneMode), &sceneMode);
3111        }
3112    }
3113
3114    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3115        uint8_t fwk_aeMode =
3116            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3117        uint8_t aeMode;
3118        int32_t redeye;
3119
3120        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3121            aeMode = CAM_AE_MODE_OFF;
3122        } else {
3123            aeMode = CAM_AE_MODE_ON;
3124        }
3125        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3126            redeye = 1;
3127        } else {
3128            redeye = 0;
3129        }
3130
3131        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3132                                          sizeof(AE_FLASH_MODE_MAP),
3133                                          fwk_aeMode);
3134        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3135                sizeof(aeMode), &aeMode);
3136        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3137                sizeof(flashMode), &flashMode);
3138        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3139                sizeof(redeye), &redeye);
3140    }
3141
3142    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3143        uint8_t fwk_whiteLevel =
3144            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3145        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3146                sizeof(WHITE_BALANCE_MODES_MAP),
3147                fwk_whiteLevel);
3148        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3149                sizeof(whiteLevel), &whiteLevel);
3150    }
3151
3152    float focalDistance = -1.0;
3153    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3154        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3155        rc = AddSetParmEntryToBatch(mParameters,
3156                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3157                sizeof(focalDistance), &focalDistance);
3158    }
3159
3160    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3161        uint8_t fwk_focusMode =
3162            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3163        uint8_t focusMode;
3164        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3165            focusMode = CAM_FOCUS_MODE_INFINITY;
3166        } else{
3167         focusMode = lookupHalName(FOCUS_MODES_MAP,
3168                                   sizeof(FOCUS_MODES_MAP),
3169                                   fwk_focusMode);
3170        }
3171        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3172                sizeof(focusMode), &focusMode);
3173    }
3174
3175    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3176        int32_t antibandingMode =
3177            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3178        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3179                sizeof(antibandingMode), &antibandingMode);
3180    }
3181
3182    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3183        int32_t expCompensation = frame_settings.find(
3184            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3185        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3186            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3187        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3188            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3189        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3190          sizeof(expCompensation), &expCompensation);
3191    }
3192
3193    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3194        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3195        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3196                sizeof(aeLock), &aeLock);
3197    }
3198    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3199        cam_fps_range_t fps_range;
3200        fps_range.min_fps =
3201            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3202        fps_range.max_fps =
3203            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3204        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3205                sizeof(fps_range), &fps_range);
3206    }
3207
3208    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3209        uint8_t awbLock =
3210            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3211        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3212                sizeof(awbLock), &awbLock);
3213    }
3214
3215    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3216        uint8_t fwk_effectMode =
3217            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3218        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3219                sizeof(EFFECT_MODES_MAP),
3220                fwk_effectMode);
3221        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3222                sizeof(effectMode), &effectMode);
3223    }
3224
3225    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3226        uint8_t colorCorrectMode =
3227            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3228        rc =
3229            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3230                    sizeof(colorCorrectMode), &colorCorrectMode);
3231    }
3232
3233    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3234        cam_color_correct_gains_t colorCorrectGains;
3235        for (int i = 0; i < 4; i++) {
3236            colorCorrectGains.gains[i] =
3237                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3238        }
3239        rc =
3240            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3241                    sizeof(colorCorrectGains), &colorCorrectGains);
3242    }
3243
3244    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3245        cam_color_correct_matrix_t colorCorrectTransform;
3246        cam_rational_type_t transform_elem;
3247        int num = 0;
3248        for (int i = 0; i < 3; i++) {
3249           for (int j = 0; j < 3; j++) {
3250              transform_elem.numerator =
3251                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3252              transform_elem.denominator =
3253                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3254              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3255              num++;
3256           }
3257        }
3258        rc =
3259            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3260                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3261    }
3262
3263    cam_trigger_t aecTrigger;
3264    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3265    aecTrigger.trigger_id = -1;
3266    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3267        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3268        aecTrigger.trigger =
3269            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3270        aecTrigger.trigger_id =
3271            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3272    }
3273    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3274                                sizeof(aecTrigger), &aecTrigger);
3275
3276    /*af_trigger must come with a trigger id*/
3277    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3278        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3279        cam_trigger_t af_trigger;
3280        af_trigger.trigger =
3281            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3282        af_trigger.trigger_id =
3283            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3284        rc = AddSetParmEntryToBatch(mParameters,
3285                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3286    }
3287
3288    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3289        int32_t demosaic =
3290            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3291        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3292                sizeof(demosaic), &demosaic);
3293    }
3294
3295    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3296        cam_edge_application_t edge_application;
3297        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3298        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3299            edge_application.sharpness = 0;
3300        } else {
3301            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3302                int32_t edgeStrength =
3303                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3304                edge_application.sharpness = edgeStrength;
3305            } else {
3306                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3307            }
3308        }
3309        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3310                sizeof(edge_application), &edge_application);
3311    }
3312
3313    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3314        int32_t respectFlashMode = 1;
3315        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3316            uint8_t fwk_aeMode =
3317                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3318            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3319                respectFlashMode = 0;
3320                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3321                    __func__);
3322            }
3323        }
3324        if (respectFlashMode) {
3325            uint8_t flashMode =
3326                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3327            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3328                                          sizeof(FLASH_MODES_MAP),
3329                                          flashMode);
3330            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3331            // To check: CAM_INTF_META_FLASH_MODE usage
3332            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3333                          sizeof(flashMode), &flashMode);
3334        }
3335    }
3336
3337    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3338        uint8_t flashPower =
3339            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3340        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3341                sizeof(flashPower), &flashPower);
3342    }
3343
3344    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3345        int64_t flashFiringTime =
3346            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3347        rc = AddSetParmEntryToBatch(mParameters,
3348                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3349    }
3350
3351    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3352        uint8_t geometricMode =
3353            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3354        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3355                sizeof(geometricMode), &geometricMode);
3356    }
3357
3358    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3359        uint8_t geometricStrength =
3360            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3361        rc = AddSetParmEntryToBatch(mParameters,
3362                CAM_INTF_META_GEOMETRIC_STRENGTH,
3363                sizeof(geometricStrength), &geometricStrength);
3364    }
3365
3366    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3367        uint8_t hotPixelMode =
3368            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3369        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3370                sizeof(hotPixelMode), &hotPixelMode);
3371    }
3372
3373    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3374        float lensAperture =
3375            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3376        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3377                sizeof(lensAperture), &lensAperture);
3378    }
3379
3380    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3381        float filterDensity =
3382            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3383        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3384                sizeof(filterDensity), &filterDensity);
3385    }
3386
3387    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3388        float focalLength =
3389            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3390        rc = AddSetParmEntryToBatch(mParameters,
3391                CAM_INTF_META_LENS_FOCAL_LENGTH,
3392                sizeof(focalLength), &focalLength);
3393    }
3394
3395    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3396        uint8_t optStabMode =
3397            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3398        rc = AddSetParmEntryToBatch(mParameters,
3399                CAM_INTF_META_LENS_OPT_STAB_MODE,
3400                sizeof(optStabMode), &optStabMode);
3401    }
3402
3403    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3404        uint8_t noiseRedMode =
3405            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3406        rc = AddSetParmEntryToBatch(mParameters,
3407                CAM_INTF_META_NOISE_REDUCTION_MODE,
3408                sizeof(noiseRedMode), &noiseRedMode);
3409    }
3410
3411    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3412        uint8_t noiseRedStrength =
3413            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3414        rc = AddSetParmEntryToBatch(mParameters,
3415                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3416                sizeof(noiseRedStrength), &noiseRedStrength);
3417    }
3418
3419    cam_crop_region_t scalerCropRegion;
3420    bool scalerCropSet = false;
3421    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3422        scalerCropRegion.left =
3423            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3424        scalerCropRegion.top =
3425            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3426        scalerCropRegion.width =
3427            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3428        scalerCropRegion.height =
3429            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3430        rc = AddSetParmEntryToBatch(mParameters,
3431                CAM_INTF_META_SCALER_CROP_REGION,
3432                sizeof(scalerCropRegion), &scalerCropRegion);
3433        scalerCropSet = true;
3434    }
3435
3436    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3437        int64_t sensorExpTime =
3438            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3439        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3440        rc = AddSetParmEntryToBatch(mParameters,
3441                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3442                sizeof(sensorExpTime), &sensorExpTime);
3443    }
3444
3445    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3446        int64_t sensorFrameDuration =
3447            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3448        int64_t minFrameDuration = getMinFrameDuration(request);
3449        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3450        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3451            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3452        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3453        rc = AddSetParmEntryToBatch(mParameters,
3454                CAM_INTF_META_SENSOR_FRAME_DURATION,
3455                sizeof(sensorFrameDuration), &sensorFrameDuration);
3456    }
3457
3458    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3459        int32_t sensorSensitivity =
3460            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3461        if (sensorSensitivity <
3462                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3463            sensorSensitivity =
3464                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3465        if (sensorSensitivity >
3466                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3467            sensorSensitivity =
3468                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3469        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3470        rc = AddSetParmEntryToBatch(mParameters,
3471                CAM_INTF_META_SENSOR_SENSITIVITY,
3472                sizeof(sensorSensitivity), &sensorSensitivity);
3473    }
3474
3475    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3476        int32_t shadingMode =
3477            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3478        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3479                sizeof(shadingMode), &shadingMode);
3480    }
3481
3482    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3483        uint8_t shadingStrength =
3484            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3485        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3486                sizeof(shadingStrength), &shadingStrength);
3487    }
3488
3489    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3490        uint8_t fwk_facedetectMode =
3491            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3492        uint8_t facedetectMode =
3493            lookupHalName(FACEDETECT_MODES_MAP,
3494                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3495        rc = AddSetParmEntryToBatch(mParameters,
3496                CAM_INTF_META_STATS_FACEDETECT_MODE,
3497                sizeof(facedetectMode), &facedetectMode);
3498    }
3499
3500    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3501        uint8_t histogramMode =
3502            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3503        rc = AddSetParmEntryToBatch(mParameters,
3504                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3505                sizeof(histogramMode), &histogramMode);
3506    }
3507
3508    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3509        uint8_t sharpnessMapMode =
3510            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3511        rc = AddSetParmEntryToBatch(mParameters,
3512                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3513                sizeof(sharpnessMapMode), &sharpnessMapMode);
3514    }
3515
3516    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3517        uint8_t tonemapMode =
3518            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3519        rc = AddSetParmEntryToBatch(mParameters,
3520                CAM_INTF_META_TONEMAP_MODE,
3521                sizeof(tonemapMode), &tonemapMode);
3522    }
3523    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3524    /*All tonemap channels will have the same number of points*/
3525    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3526        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3527        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3528        cam_rgb_tonemap_curves tonemapCurves;
3529        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3530
3531        /* ch0 = G*/
3532        int point = 0;
3533        cam_tonemap_curve_t tonemapCurveGreen;
3534        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3535            for (int j = 0; j < 2; j++) {
3536               tonemapCurveGreen.tonemap_points[i][j] =
3537                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3538               point++;
3539            }
3540        }
3541        tonemapCurves.curves[0] = tonemapCurveGreen;
3542
3543        /* ch 1 = B */
3544        point = 0;
3545        cam_tonemap_curve_t tonemapCurveBlue;
3546        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3547            for (int j = 0; j < 2; j++) {
3548               tonemapCurveBlue.tonemap_points[i][j] =
3549                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3550               point++;
3551            }
3552        }
3553        tonemapCurves.curves[1] = tonemapCurveBlue;
3554
3555        /* ch 2 = R */
3556        point = 0;
3557        cam_tonemap_curve_t tonemapCurveRed;
3558        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3559            for (int j = 0; j < 2; j++) {
3560               tonemapCurveRed.tonemap_points[i][j] =
3561                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3562               point++;
3563            }
3564        }
3565        tonemapCurves.curves[2] = tonemapCurveRed;
3566
3567        rc = AddSetParmEntryToBatch(mParameters,
3568                CAM_INTF_META_TONEMAP_CURVES,
3569                sizeof(tonemapCurves), &tonemapCurves);
3570    }
3571
3572    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3573        uint8_t captureIntent =
3574            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3575        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3576                sizeof(captureIntent), &captureIntent);
3577    }
3578
3579    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3580        uint8_t blackLevelLock =
3581            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3582        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3583                sizeof(blackLevelLock), &blackLevelLock);
3584    }
3585
3586    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3587        uint8_t lensShadingMapMode =
3588            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3589        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3590                sizeof(lensShadingMapMode), &lensShadingMapMode);
3591    }
3592
3593    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3594        cam_area_t roi;
3595        bool reset = true;
3596        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3597        if (scalerCropSet) {
3598            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3599        }
3600        if (reset) {
3601            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3602                    sizeof(roi), &roi);
3603        }
3604    }
3605
3606    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3607        cam_area_t roi;
3608        bool reset = true;
3609        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3610        if (scalerCropSet) {
3611            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3612        }
3613        if (reset) {
3614            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3615                    sizeof(roi), &roi);
3616        }
3617    }
3618
3619    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3620        cam_area_t roi;
3621        bool reset = true;
3622        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3623        if (scalerCropSet) {
3624            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3625        }
3626        if (reset) {
3627            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3628                    sizeof(roi), &roi);
3629        }
3630    }
3631    return rc;
3632}
3633
3634/*===========================================================================
3635 * FUNCTION   : getJpegSettings
3636 *
3637 * DESCRIPTION: save the jpeg settings in the HAL
3638 *
3639 *
3640 * PARAMETERS :
3641 *   @settings  : frame settings information from framework
3642 *
3643 *
3644 * RETURN     : success: NO_ERROR
3645 *              failure:
3646 *==========================================================================*/
3647int QCamera3HardwareInterface::getJpegSettings
3648                                  (const camera_metadata_t *settings)
3649{
3650    if (mJpegSettings) {
3651        if (mJpegSettings->gps_timestamp) {
3652            free(mJpegSettings->gps_timestamp);
3653            mJpegSettings->gps_timestamp = NULL;
3654        }
3655        if (mJpegSettings->gps_coordinates) {
3656            for (int i = 0; i < 3; i++) {
3657                free(mJpegSettings->gps_coordinates[i]);
3658                mJpegSettings->gps_coordinates[i] = NULL;
3659            }
3660        }
3661        free(mJpegSettings);
3662        mJpegSettings = NULL;
3663    }
3664    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3665    CameraMetadata jpeg_settings;
3666    jpeg_settings = settings;
3667
3668    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3669        mJpegSettings->jpeg_orientation =
3670            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3671    } else {
3672        mJpegSettings->jpeg_orientation = 0;
3673    }
3674    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3675        mJpegSettings->jpeg_quality =
3676            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3677    } else {
3678        mJpegSettings->jpeg_quality = 85;
3679    }
3680    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3681        mJpegSettings->thumbnail_size.width =
3682            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3683        mJpegSettings->thumbnail_size.height =
3684            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3685    } else {
3686        mJpegSettings->thumbnail_size.width = 0;
3687        mJpegSettings->thumbnail_size.height = 0;
3688    }
3689    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3690        for (int i = 0; i < 3; i++) {
3691            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3692            *(mJpegSettings->gps_coordinates[i]) =
3693                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3694        }
3695    } else{
3696       for (int i = 0; i < 3; i++) {
3697            mJpegSettings->gps_coordinates[i] = NULL;
3698        }
3699    }
3700
3701    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3702        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3703        *(mJpegSettings->gps_timestamp) =
3704            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3705    } else {
3706        mJpegSettings->gps_timestamp = NULL;
3707    }
3708
3709    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3710        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3711        for (int i = 0; i < len; i++) {
3712            mJpegSettings->gps_processing_method[i] =
3713                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3714        }
3715        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3716            mJpegSettings->gps_processing_method[len] = '\0';
3717        }
3718    } else {
3719        mJpegSettings->gps_processing_method[0] = '\0';
3720    }
3721
3722    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3723        mJpegSettings->sensor_sensitivity =
3724            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3725    } else {
3726        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3727    }
3728
3729    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3730
3731    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3732        mJpegSettings->lens_focal_length =
3733            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3734    }
3735    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3736        mJpegSettings->exposure_compensation =
3737            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3738    }
3739    mJpegSettings->sharpness = 10; //default value
3740    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3741        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3742        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3743            mJpegSettings->sharpness = 0;
3744        }
3745    }
3746    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3747    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3748    mJpegSettings->is_jpeg_format = true;
3749    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3750    return 0;
3751}
3752
3753/*===========================================================================
3754 * FUNCTION   : captureResultCb
3755 *
3756 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3757 *
3758 * PARAMETERS :
3759 *   @frame  : frame information from mm-camera-interface
3760 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3761 *   @userdata: userdata
3762 *
3763 * RETURN     : NONE
3764 *==========================================================================*/
3765void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3766                camera3_stream_buffer_t *buffer,
3767                uint32_t frame_number, void *userdata)
3768{
3769    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3770    if (hw == NULL) {
3771        ALOGE("%s: Invalid hw %p", __func__, hw);
3772        return;
3773    }
3774
3775    hw->captureResultCb(metadata, buffer, frame_number);
3776    return;
3777}
3778
3779
3780/*===========================================================================
3781 * FUNCTION   : initialize
3782 *
3783 * DESCRIPTION: Pass framework callback pointers to HAL
3784 *
3785 * PARAMETERS :
3786 *
3787 *
3788 * RETURN     : Success : 0
3789 *              Failure: -ENODEV
3790 *==========================================================================*/
3791
3792int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3793                                  const camera3_callback_ops_t *callback_ops)
3794{
3795    ALOGV("%s: E", __func__);
3796    QCamera3HardwareInterface *hw =
3797        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3798    if (!hw) {
3799        ALOGE("%s: NULL camera device", __func__);
3800        return -ENODEV;
3801    }
3802
3803    int rc = hw->initialize(callback_ops);
3804    ALOGV("%s: X", __func__);
3805    return rc;
3806}
3807
3808/*===========================================================================
3809 * FUNCTION   : configure_streams
3810 *
3811 * DESCRIPTION:
3812 *
3813 * PARAMETERS :
3814 *
3815 *
3816 * RETURN     : Success: 0
3817 *              Failure: -EINVAL (if stream configuration is invalid)
3818 *                       -ENODEV (fatal error)
3819 *==========================================================================*/
3820
3821int QCamera3HardwareInterface::configure_streams(
3822        const struct camera3_device *device,
3823        camera3_stream_configuration_t *stream_list)
3824{
3825    ALOGV("%s: E", __func__);
3826    QCamera3HardwareInterface *hw =
3827        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3828    if (!hw) {
3829        ALOGE("%s: NULL camera device", __func__);
3830        return -ENODEV;
3831    }
3832    int rc = hw->configureStreams(stream_list);
3833    ALOGV("%s: X", __func__);
3834    return rc;
3835}
3836
3837/*===========================================================================
3838 * FUNCTION   : register_stream_buffers
3839 *
3840 * DESCRIPTION: Register stream buffers with the device
3841 *
3842 * PARAMETERS :
3843 *
3844 * RETURN     :
3845 *==========================================================================*/
3846int QCamera3HardwareInterface::register_stream_buffers(
3847        const struct camera3_device *device,
3848        const camera3_stream_buffer_set_t *buffer_set)
3849{
3850    ALOGV("%s: E", __func__);
3851    QCamera3HardwareInterface *hw =
3852        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3853    if (!hw) {
3854        ALOGE("%s: NULL camera device", __func__);
3855        return -ENODEV;
3856    }
3857    int rc = hw->registerStreamBuffers(buffer_set);
3858    ALOGV("%s: X", __func__);
3859    return rc;
3860}
3861
3862/*===========================================================================
3863 * FUNCTION   : construct_default_request_settings
3864 *
3865 * DESCRIPTION: Configure a settings buffer to meet the required use case
3866 *
3867 * PARAMETERS :
3868 *
3869 *
3870 * RETURN     : Success: Return valid metadata
3871 *              Failure: Return NULL
3872 *==========================================================================*/
3873const camera_metadata_t* QCamera3HardwareInterface::
3874    construct_default_request_settings(const struct camera3_device *device,
3875                                        int type)
3876{
3877
3878    ALOGV("%s: E", __func__);
3879    camera_metadata_t* fwk_metadata = NULL;
3880    QCamera3HardwareInterface *hw =
3881        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3882    if (!hw) {
3883        ALOGE("%s: NULL camera device", __func__);
3884        return NULL;
3885    }
3886
3887    fwk_metadata = hw->translateCapabilityToMetadata(type);
3888
3889    ALOGV("%s: X", __func__);
3890    return fwk_metadata;
3891}
3892
3893/*===========================================================================
3894 * FUNCTION   : process_capture_request
3895 *
3896 * DESCRIPTION:
3897 *
3898 * PARAMETERS :
3899 *
3900 *
3901 * RETURN     :
3902 *==========================================================================*/
3903int QCamera3HardwareInterface::process_capture_request(
3904                    const struct camera3_device *device,
3905                    camera3_capture_request_t *request)
3906{
3907    ALOGV("%s: E", __func__);
3908    QCamera3HardwareInterface *hw =
3909        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3910    if (!hw) {
3911        ALOGE("%s: NULL camera device", __func__);
3912        return -EINVAL;
3913    }
3914
3915    int rc = hw->processCaptureRequest(request);
3916    ALOGV("%s: X", __func__);
3917    return rc;
3918}
3919
3920/*===========================================================================
3921 * FUNCTION   : get_metadata_vendor_tag_ops
3922 *
3923 * DESCRIPTION:
3924 *
3925 * PARAMETERS :
3926 *
3927 *
3928 * RETURN     :
3929 *==========================================================================*/
3930
3931void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3932                const struct camera3_device *device,
3933                vendor_tag_query_ops_t* ops)
3934{
3935    ALOGV("%s: E", __func__);
3936    QCamera3HardwareInterface *hw =
3937        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3938    if (!hw) {
3939        ALOGE("%s: NULL camera device", __func__);
3940        return;
3941    }
3942
3943    hw->getMetadataVendorTagOps(ops);
3944    ALOGV("%s: X", __func__);
3945    return;
3946}
3947
3948/*===========================================================================
3949 * FUNCTION   : dump
3950 *
3951 * DESCRIPTION:
3952 *
3953 * PARAMETERS :
3954 *
3955 *
3956 * RETURN     :
3957 *==========================================================================*/
3958
3959void QCamera3HardwareInterface::dump(
3960                const struct camera3_device *device, int fd)
3961{
3962    ALOGV("%s: E", __func__);
3963    QCamera3HardwareInterface *hw =
3964        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3965    if (!hw) {
3966        ALOGE("%s: NULL camera device", __func__);
3967        return;
3968    }
3969
3970    hw->dump(fd);
3971    ALOGV("%s: X", __func__);
3972    return;
3973}
3974
3975/*===========================================================================
3976 * FUNCTION   : flush
3977 *
3978 * DESCRIPTION:
3979 *
3980 * PARAMETERS :
3981 *
3982 *
3983 * RETURN     :
3984 *==========================================================================*/
3985
3986int QCamera3HardwareInterface::flush(
3987                const struct camera3_device *device)
3988{
3989    int rc;
3990    ALOGV("%s: E", __func__);
3991    QCamera3HardwareInterface *hw =
3992        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3993    if (!hw) {
3994        ALOGE("%s: NULL camera device", __func__);
3995        return -EINVAL;
3996    }
3997
3998    rc = hw->flush();
3999    ALOGV("%s: X", __func__);
4000    return rc;
4001}
4002
4003/*===========================================================================
4004 * FUNCTION   : close_camera_device
4005 *
4006 * DESCRIPTION:
4007 *
4008 * PARAMETERS :
4009 *
4010 *
4011 * RETURN     :
4012 *==========================================================================*/
4013int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4014{
4015    ALOGV("%s: E", __func__);
4016    int ret = NO_ERROR;
4017    QCamera3HardwareInterface *hw =
4018        reinterpret_cast<QCamera3HardwareInterface *>(
4019            reinterpret_cast<camera3_device_t *>(device)->priv);
4020    if (!hw) {
4021        ALOGE("NULL camera device");
4022        return BAD_VALUE;
4023    }
4024    delete hw;
4025
4026    pthread_mutex_lock(&mCameraSessionLock);
4027    mCameraSessionActive = 0;
4028    pthread_mutex_unlock(&mCameraSessionLock);
4029    ALOGV("%s: X", __func__);
4030    return ret;
4031}
4032
4033/*===========================================================================
4034 * FUNCTION   : getWaveletDenoiseProcessPlate
4035 *
4036 * DESCRIPTION: query wavelet denoise process plate
4037 *
4038 * PARAMETERS : None
4039 *
 * RETURN     : WNR process plate value
4041 *==========================================================================*/
4042cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4043{
4044    char prop[PROPERTY_VALUE_MAX];
4045    memset(prop, 0, sizeof(prop));
4046    property_get("persist.denoise.process.plates", prop, "0");
4047    int processPlate = atoi(prop);
4048    switch(processPlate) {
4049    case 0:
4050        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4051    case 1:
4052        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4053    case 2:
4054        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4055    case 3:
4056        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4057    default:
4058        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4059    }
4060}
4061
4062/*===========================================================================
4063 * FUNCTION   : needRotationReprocess
4064 *
4065 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4066 *
4067 * PARAMETERS : none
4068 *
4069 * RETURN     : true: needed
4070 *              false: no need
4071 *==========================================================================*/
4072bool QCamera3HardwareInterface::needRotationReprocess()
4073{
4074
4075    if (!mJpegSettings->is_jpeg_format) {
4076        // RAW image, no need to reprocess
4077        return false;
4078    }
4079
4080    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4081        mJpegSettings->jpeg_orientation > 0) {
4082        // current rotation is not zero, and pp has the capability to process rotation
4083        ALOGD("%s: need do reprocess for rotation", __func__);
4084        return true;
4085    }
4086
4087    return false;
4088}
4089
4090/*===========================================================================
4091 * FUNCTION   : needReprocess
4092 *
 * DESCRIPTION: if reprocess is needed
4094 *
4095 * PARAMETERS : none
4096 *
4097 * RETURN     : true: needed
4098 *              false: no need
4099 *==========================================================================*/
4100bool QCamera3HardwareInterface::needReprocess()
4101{
4102    if (!mJpegSettings->is_jpeg_format) {
4103        // RAW image, no need to reprocess
4104        return false;
4105    }
4106
4107    if ((mJpegSettings->min_required_pp_mask > 0) ||
4108         isWNREnabled()) {
4109        // TODO: add for ZSL HDR later
4110        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4111        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4112        return true;
4113    }
4114    return needRotationReprocess();
4115}
4116
4117/*===========================================================================
4118 * FUNCTION   : addOnlineReprocChannel
4119 *
 * DESCRIPTION: add an online reprocess channel that will do reprocess on frames
4121 *              coming from input channel
4122 *
4123 * PARAMETERS :
4124 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4125 *
4126 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4127 *==========================================================================*/
4128QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4129              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4130{
4131    int32_t rc = NO_ERROR;
4132    QCamera3ReprocessChannel *pChannel = NULL;
4133    if (pInputChannel == NULL) {
4134        ALOGE("%s: input channel obj is NULL", __func__);
4135        return NULL;
4136    }
4137
4138    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4139            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4140    if (NULL == pChannel) {
4141        ALOGE("%s: no mem for reprocess channel", __func__);
4142        return NULL;
4143    }
4144
4145    // Capture channel, only need snapshot and postview streams start together
4146    mm_camera_channel_attr_t attr;
4147    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4148    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4149    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4150    rc = pChannel->initialize();
4151    if (rc != NO_ERROR) {
4152        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4153        delete pChannel;
4154        return NULL;
4155    }
4156
4157    // pp feature config
4158    cam_pp_feature_config_t pp_config;
4159    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4160    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4161        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4162        pp_config.sharpness = mJpegSettings->sharpness;
4163    }
4164
4165    if (isWNREnabled()) {
4166        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4167        pp_config.denoise2d.denoise_enable = 1;
4168        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4169    }
4170    if (needRotationReprocess()) {
4171        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4172        int rotation = mJpegSettings->jpeg_orientation;
4173        if (rotation == 0) {
4174            pp_config.rotation = ROTATE_0;
4175        } else if (rotation == 90) {
4176            pp_config.rotation = ROTATE_90;
4177        } else if (rotation == 180) {
4178            pp_config.rotation = ROTATE_180;
4179        } else if (rotation == 270) {
4180            pp_config.rotation = ROTATE_270;
4181        }
4182    }
4183
4184   rc = pChannel->addReprocStreamsFromSource(pp_config,
4185                                             pInputChannel,
4186                                             mMetadataChannel);
4187
4188    if (rc != NO_ERROR) {
4189        delete pChannel;
4190        return NULL;
4191    }
4192    return pChannel;
4193}
4194
// Number of unmatched frames allowed to queue up, taken from the
// capability's minimum post-processing buffer count.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4199
// Whether wavelet noise reduction is supported per this camera's capability.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4203
4204}; //end namespace qcamera
4205