QCamera3HWI.cpp revision 17c34c93d837a49f778ed0294c3670a30f10396e
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
// Generic max helper (prefer over std::max here to match legacy HAL style).
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand for fetching the mapped pointer of buffer INDEX from a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables, filled once at module load and read by every
// HAL instance afterwards.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Last parameter batch applied to the backend (global scratch state).
parm_buffer_t *prevSettings;
// Cached static metadata blobs handed to the framework, one per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes open/close across all HAL instances; only one camera session
// may be active at a time (see openCamera()).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Translation table: framework ANDROID_CONTROL_EFFECT_MODE_* -> backend CAM_EFFECT_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
72
// Translation table: framework ANDROID_CONTROL_AWB_MODE_* -> backend CAM_WB_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
84
// Translation table: framework ANDROID_CONTROL_SCENE_MODE_* -> backend CAM_SCENE_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    // STEADYPHOTO maps onto the backend's antishake implementation.
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
102
// Translation table: framework ANDROID_CONTROL_AF_MODE_* -> backend CAM_FOCUS_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    // AF_MODE_OFF is represented as fixed focus on the backend (no AF sweep).
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
111
// Translation table: framework AE antibanding modes -> backend CAM_ANTIBANDING_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
118
// Translation table: AE mode -> flash mode the backend should use while AE runs.
// Note both AE_MODE_OFF and plain AE_MODE_ON keep the flash off; the REDEYE
// variant is folded into plain auto-flash (no dedicated backend mode).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
126
// Translation table: framework ANDROID_FLASH_MODE_* -> backend CAM_FLASH_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
132
// Translation table: face-detect statistics modes. SIMPLE is not listed, so
// only OFF and FULL are supported by this HAL.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
137
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the trailing
// (0, 0) entry advertises "no thumbnail" as required by the HAL3 metadata spec.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
// camera3_device_ops_t vtable handed to the framework; each entry forwards to
// the static trampoline of the same name (GNU-style designated initializers).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
164QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
165    : mCameraId(cameraId),
166      mCameraHandle(NULL),
167      mCameraOpened(false),
168      mCameraInitialized(false),
169      mCallbackOps(NULL),
170      mInputStream(NULL),
171      mMetadataChannel(NULL),
172      mPictureChannel(NULL),
173      mFirstRequest(false),
174      mParamHeap(NULL),
175      mParameters(NULL),
176      mJpegSettings(NULL),
177      mIsZslMode(false),
178      mMinProcessedFrameDuration(0),
179      mMinJpegFrameDuration(0),
180      mMinRawFrameDuration(0),
181      m_pPowerModule(NULL)
182{
183    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
184    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
185    mCameraDevice.common.close = close_camera_device;
186    mCameraDevice.ops = &mCameraOps;
187    mCameraDevice.priv = this;
188    gCamCapability[cameraId]->version = CAM_HAL_V3;
189    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
190    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
191    gCamCapability[cameraId]->min_num_pp_bufs = 3;
192
193    pthread_cond_init(&mRequestCond, NULL);
194    mPendingRequest = 0;
195    mCurrentRequestId = -1;
196    pthread_mutex_init(&mMutex, NULL);
197
198    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
199        mDefaultMetadata[i] = NULL;
200
201#ifdef HAS_MULTIMEDIA_HINTS
202    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
203        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
204    }
205#endif
206}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if ((*it)->registered && (*it)->buffer_set.buffers) {
240             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
241        }
242        if (channel)
243            delete channel;
244        free (*it);
245    }
246
247    mPictureChannel = NULL;
248
249    if (mJpegSettings != NULL) {
250        free(mJpegSettings);
251        mJpegSettings = NULL;
252    }
253
254    /* Clean up all channels */
255    if (mCameraInitialized) {
256        if (mMetadataChannel) {
257            mMetadataChannel->stop();
258            delete mMetadataChannel;
259            mMetadataChannel = NULL;
260        }
261        deinitParameters();
262    }
263
264    if (mCameraOpened)
265        closeCamera();
266
267    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
268        if (mDefaultMetadata[i])
269            free_camera_metadata(mDefaultMetadata[i]);
270
271    pthread_cond_destroy(&mRequestCond);
272
273    pthread_mutex_destroy(&mMutex);
274    ALOGV("%s: X", __func__);
275}
276
277/*===========================================================================
278 * FUNCTION   : openCamera
279 *
280 * DESCRIPTION: open camera
281 *
282 * PARAMETERS :
283 *   @hw_device  : double ptr for camera device struct
284 *
285 * RETURN     : int32_t type of status
286 *              NO_ERROR  -- success
287 *              none-zero failure code
288 *==========================================================================*/
289int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
290{
291    int rc = 0;
292    pthread_mutex_lock(&mCameraSessionLock);
293    if (mCameraSessionActive) {
294        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
295        pthread_mutex_unlock(&mCameraSessionLock);
296        return -EDQUOT;
297    }
298
299    if (mCameraOpened) {
300        *hw_device = NULL;
301        return PERMISSION_DENIED;
302    }
303
304    rc = openCamera();
305    if (rc == 0) {
306        *hw_device = &mCameraDevice.common;
307        mCameraSessionActive = 1;
308    } else
309        *hw_device = NULL;
310
311#ifdef HAS_MULTIMEDIA_HINTS
312    if (rc == 0) {
313        if (m_pPowerModule) {
314            if (m_pPowerModule->powerHint) {
315                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
316                        (void *)"state=1");
317            }
318        }
319    }
320#endif
321    pthread_mutex_unlock(&mCameraSessionLock);
322    return rc;
323}
324
325/*===========================================================================
326 * FUNCTION   : openCamera
327 *
328 * DESCRIPTION: open camera
329 *
330 * PARAMETERS : none
331 *
332 * RETURN     : int32_t type of status
333 *              NO_ERROR  -- success
334 *              none-zero failure code
335 *==========================================================================*/
336int QCamera3HardwareInterface::openCamera()
337{
338    if (mCameraHandle) {
339        ALOGE("Failure: Camera already opened");
340        return ALREADY_EXISTS;
341    }
342    mCameraHandle = camera_open(mCameraId);
343    if (!mCameraHandle) {
344        ALOGE("camera_open failed.");
345        return UNKNOWN_ERROR;
346    }
347
348    mCameraOpened = true;
349
350    return NO_ERROR;
351}
352
353/*===========================================================================
354 * FUNCTION   : closeCamera
355 *
356 * DESCRIPTION: close camera
357 *
358 * PARAMETERS : none
359 *
360 * RETURN     : int32_t type of status
361 *              NO_ERROR  -- success
362 *              none-zero failure code
363 *==========================================================================*/
364int QCamera3HardwareInterface::closeCamera()
365{
366    int rc = NO_ERROR;
367
368    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
369    mCameraHandle = NULL;
370    mCameraOpened = false;
371
372#ifdef HAS_MULTIMEDIA_HINTS
373    if (rc == NO_ERROR) {
374        if (m_pPowerModule) {
375            if (m_pPowerModule->powerHint) {
376                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
377                        (void *)"state=0");
378            }
379        }
380    }
381#endif
382
383    return rc;
384}
385
386/*===========================================================================
387 * FUNCTION   : initialize
388 *
389 * DESCRIPTION: Initialize frameworks callback functions
390 *
391 * PARAMETERS :
392 *   @callback_ops : callback function to frameworks
393 *
394 * RETURN     :
395 *
396 *==========================================================================*/
397int QCamera3HardwareInterface::initialize(
398        const struct camera3_callback_ops *callback_ops)
399{
400    int rc;
401
402    pthread_mutex_lock(&mMutex);
403
404    rc = initParameters();
405    if (rc < 0) {
406        ALOGE("%s: initParamters failed %d", __func__, rc);
407       goto err1;
408    }
409    mCallbackOps = callback_ops;
410
411    pthread_mutex_unlock(&mMutex);
412    mCameraInitialized = true;
413    return 0;
414
415err1:
416    pthread_mutex_unlock(&mMutex);
417    return rc;
418}
419
420/*===========================================================================
421 * FUNCTION   : configureStreams
422 *
423 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
424 *              and output streams.
425 *
426 * PARAMETERS :
427 *   @stream_list : streams to be configured
428 *
429 * RETURN     :
430 *
431 *==========================================================================*/
432int QCamera3HardwareInterface::configureStreams(
433        camera3_stream_configuration_t *streamList)
434{
435    int rc = 0;
436    mIsZslMode = false;
437
438    // Sanity check stream_list
439    if (streamList == NULL) {
440        ALOGE("%s: NULL stream configuration", __func__);
441        return BAD_VALUE;
442    }
443    if (streamList->streams == NULL) {
444        ALOGE("%s: NULL stream list", __func__);
445        return BAD_VALUE;
446    }
447
448    if (streamList->num_streams < 1) {
449        ALOGE("%s: Bad number of streams requested: %d", __func__,
450                streamList->num_streams);
451        return BAD_VALUE;
452    }
453
454    /* first invalidate all the steams in the mStreamList
455     * if they appear again, they will be validated */
456    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
457            it != mStreamInfo.end(); it++) {
458        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
459        channel->stop();
460        (*it)->status = INVALID;
461    }
462    if (mMetadataChannel) {
463        /* If content of mStreamInfo is not 0, there is metadata stream */
464        mMetadataChannel->stop();
465    }
466
467    pthread_mutex_lock(&mMutex);
468
469    camera3_stream_t *inputStream = NULL;
470    camera3_stream_t *jpegStream = NULL;
471    cam_stream_size_info_t stream_config_info;
472
473    for (size_t i = 0; i < streamList->num_streams; i++) {
474        camera3_stream_t *newStream = streamList->streams[i];
475        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
476                __func__, newStream->stream_type, newStream->format,
477                 newStream->width, newStream->height);
478        //if the stream is in the mStreamList validate it
479        bool stream_exists = false;
480        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
481                it != mStreamInfo.end(); it++) {
482            if ((*it)->stream == newStream) {
483                QCamera3Channel *channel =
484                    (QCamera3Channel*)(*it)->stream->priv;
485                stream_exists = true;
486                (*it)->status = RECONFIGURE;
487                /*delete the channel object associated with the stream because
488                  we need to reconfigure*/
489                delete channel;
490                (*it)->stream->priv = NULL;
491            }
492        }
493        if (!stream_exists) {
494            //new stream
495            stream_info_t* stream_info;
496            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
497            stream_info->stream = newStream;
498            stream_info->status = VALID;
499            stream_info->registered = 0;
500            mStreamInfo.push_back(stream_info);
501        }
502        if (newStream->stream_type == CAMERA3_STREAM_INPUT
503                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
504            if (inputStream != NULL) {
505                ALOGE("%s: Multiple input streams requested!", __func__);
506                pthread_mutex_unlock(&mMutex);
507                return BAD_VALUE;
508            }
509            inputStream = newStream;
510        }
511        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
512            jpegStream = newStream;
513        }
514    }
515    mInputStream = inputStream;
516
517    /*clean up invalid streams*/
518    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
519            it != mStreamInfo.end();) {
520        if(((*it)->status) == INVALID){
521            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
522            delete channel;
523            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
524            free(*it);
525            it = mStreamInfo.erase(it);
526        } else {
527            it++;
528        }
529    }
530    if (mMetadataChannel) {
531        delete mMetadataChannel;
532        mMetadataChannel = NULL;
533    }
534
535    //Create metadata channel and initialize it
536    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
537                    mCameraHandle->ops, captureResultCb,
538                    &gCamCapability[mCameraId]->padding_info, this);
539    if (mMetadataChannel == NULL) {
540        ALOGE("%s: failed to allocate metadata channel", __func__);
541        rc = -ENOMEM;
542        pthread_mutex_unlock(&mMutex);
543        return rc;
544    }
545    rc = mMetadataChannel->initialize();
546    if (rc < 0) {
547        ALOGE("%s: metadata channel initialization failed", __func__);
548        delete mMetadataChannel;
549        pthread_mutex_unlock(&mMutex);
550        return rc;
551    }
552
553    /* Allocate channel objects for the requested streams */
554    for (size_t i = 0; i < streamList->num_streams; i++) {
555        camera3_stream_t *newStream = streamList->streams[i];
556        uint32_t stream_usage = newStream->usage;
557        stream_config_info.stream_sizes[i].width = newStream->width;
558        stream_config_info.stream_sizes[i].height = newStream->height;
559        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
560            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
561            //for zsl stream the size is jpeg size
562            stream_config_info.stream_sizes[i].width = jpegStream->width;
563            stream_config_info.stream_sizes[i].height = jpegStream->height;
564            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
565        } else {
566           //for non zsl streams find out the format
567           switch (newStream->format) {
568           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
569              {
570                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
571                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
572                 } else {
573                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
574                 }
575              }
576              break;
577           case HAL_PIXEL_FORMAT_YCbCr_420_888:
578              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
579              break;
580           case HAL_PIXEL_FORMAT_BLOB:
581              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
582              break;
583           default:
584              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
585              break;
586           }
587        }
588        if (newStream->priv == NULL) {
589            //New stream, construct channel
590            switch (newStream->stream_type) {
591            case CAMERA3_STREAM_INPUT:
592                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
593                break;
594            case CAMERA3_STREAM_BIDIRECTIONAL:
595                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
596                    GRALLOC_USAGE_HW_CAMERA_WRITE;
597                break;
598            case CAMERA3_STREAM_OUTPUT:
599                /* For video encoding stream, set read/write rarely
600                 * flag so that they may be set to un-cached */
601                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
602                    newStream->usage =
603                         (GRALLOC_USAGE_SW_READ_RARELY |
604                         GRALLOC_USAGE_SW_WRITE_RARELY |
605                         GRALLOC_USAGE_HW_CAMERA_WRITE);
606                else
607                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
608                break;
609            default:
610                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
611                break;
612            }
613
614            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
615                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
616                QCamera3Channel *channel;
617                switch (newStream->format) {
618                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
619                case HAL_PIXEL_FORMAT_YCbCr_420_888:
620                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
621                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
622                        jpegStream) {
623                        uint32_t width = jpegStream->width;
624                        uint32_t height = jpegStream->height;
625                        mIsZslMode = true;
626                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
627                            mCameraHandle->ops, captureResultCb,
628                            &gCamCapability[mCameraId]->padding_info, this, newStream,
629                            width, height);
630                    } else
631                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
632                            mCameraHandle->ops, captureResultCb,
633                            &gCamCapability[mCameraId]->padding_info, this, newStream);
634                    if (channel == NULL) {
635                        ALOGE("%s: allocation of channel failed", __func__);
636                        pthread_mutex_unlock(&mMutex);
637                        return -ENOMEM;
638                    }
639
640                    newStream->priv = channel;
641                    break;
642                case HAL_PIXEL_FORMAT_BLOB:
643                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
644                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
645                            mCameraHandle->ops, captureResultCb,
646                            &gCamCapability[mCameraId]->padding_info, this, newStream);
647                    if (mPictureChannel == NULL) {
648                        ALOGE("%s: allocation of channel failed", __func__);
649                        pthread_mutex_unlock(&mMutex);
650                        return -ENOMEM;
651                    }
652                    newStream->priv = (QCamera3Channel*)mPictureChannel;
653                    break;
654
655                //TODO: Add support for app consumed format?
656                default:
657                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
658                    break;
659                }
660            }
661        } else {
662            // Channel already exists for this stream
663            // Do nothing for now
664        }
665    }
666
667    int32_t hal_version = CAM_HAL_V3;
668    stream_config_info.num_streams = streamList->num_streams;
669
670    // settings/parameters don't carry over for new configureStreams
671    memset(mParameters, 0, sizeof(parm_buffer_t));
672
673    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
674    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
675                sizeof(hal_version), &hal_version);
676
677    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
678                sizeof(stream_config_info), &stream_config_info);
679
680    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
681
682    /*For the streams to be reconfigured we need to register the buffers
683      since the framework wont*/
684    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
685            it != mStreamInfo.end(); it++) {
686        if ((*it)->status == RECONFIGURE) {
687            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
688            /*only register buffers for streams that have already been
689              registered*/
690            if ((*it)->registered) {
691                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
692                        (*it)->buffer_set.buffers);
693                if (rc != NO_ERROR) {
694                    ALOGE("%s: Failed to register the buffers of old stream,\
695                            rc = %d", __func__, rc);
696                }
697                ALOGV("%s: channel %p has %d buffers",
698                        __func__, channel, (*it)->buffer_set.num_buffers);
699            }
700        }
701
702        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
703        if (index == NAME_NOT_FOUND) {
704            mPendingBuffersMap.add((*it)->stream, 0);
705        } else {
706            mPendingBuffersMap.editValueAt(index) = 0;
707        }
708    }
709
710    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
711    mPendingRequestsList.clear();
712
713    /*flush the metadata list*/
714    if (!mStoredMetadataList.empty()) {
715        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
716              m != mStoredMetadataList.end(); m++) {
717            mMetadataChannel->bufDone(m->meta_buf);
718            free(m->meta_buf);
719            m = mStoredMetadataList.erase(m);
720        }
721    }
722
723    mFirstRequest = true;
724
725    //Get min frame duration for this streams configuration
726    deriveMinFrameDuration();
727
728    pthread_mutex_unlock(&mMutex);
729    return rc;
730}
731
732/*===========================================================================
733 * FUNCTION   : validateCaptureRequest
734 *
735 * DESCRIPTION: validate a capture request from camera service
736 *
737 * PARAMETERS :
738 *   @request : request from framework to process
739 *
740 * RETURN     :
741 *
742 *==========================================================================*/
743int QCamera3HardwareInterface::validateCaptureRequest(
744                    camera3_capture_request_t *request)
745{
746    ssize_t idx = 0;
747    const camera3_stream_buffer_t *b;
748    CameraMetadata meta;
749
750    /* Sanity check the request */
751    if (request == NULL) {
752        ALOGE("%s: NULL capture request", __func__);
753        return BAD_VALUE;
754    }
755
756    uint32_t frameNumber = request->frame_number;
757    if (request->input_buffer != NULL &&
758            request->input_buffer->stream != mInputStream) {
759        ALOGE("%s: Request %d: Input buffer not from input stream!",
760                __FUNCTION__, frameNumber);
761        return BAD_VALUE;
762    }
763    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
764        ALOGE("%s: Request %d: No output buffers provided!",
765                __FUNCTION__, frameNumber);
766        return BAD_VALUE;
767    }
768    if (request->input_buffer != NULL) {
769        b = request->input_buffer;
770        QCamera3Channel *channel =
771            static_cast<QCamera3Channel*>(b->stream->priv);
772        if (channel == NULL) {
773            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
774                    __func__, frameNumber, idx);
775            return BAD_VALUE;
776        }
777        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
778            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
779                    __func__, frameNumber, idx);
780            return BAD_VALUE;
781        }
782        if (b->release_fence != -1) {
783            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
784                    __func__, frameNumber, idx);
785            return BAD_VALUE;
786        }
787        if (b->buffer == NULL) {
788            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
789                    __func__, frameNumber, idx);
790            return BAD_VALUE;
791        }
792    }
793
794    // Validate all buffers
795    b = request->output_buffers;
796    do {
797        QCamera3Channel *channel =
798                static_cast<QCamera3Channel*>(b->stream->priv);
799        if (channel == NULL) {
800            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
801                    __func__, frameNumber, idx);
802            return BAD_VALUE;
803        }
804        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
805            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
806                    __func__, frameNumber, idx);
807            return BAD_VALUE;
808        }
809        if (b->release_fence != -1) {
810            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
811                    __func__, frameNumber, idx);
812            return BAD_VALUE;
813        }
814        if (b->buffer == NULL) {
815            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
816                    __func__, frameNumber, idx);
817            return BAD_VALUE;
818        }
819        idx++;
820        b = request->output_buffers + idx;
821    } while (idx < (ssize_t)request->num_output_buffers);
822
823    return NO_ERROR;
824}
825
826/*===========================================================================
827 * FUNCTION   : deriveMinFrameDuration
828 *
829 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
830 *              on currently configured streams.
831 *
832 * PARAMETERS : NONE
833 *
834 * RETURN     : NONE
835 *
836 *==========================================================================*/
837void QCamera3HardwareInterface::deriveMinFrameDuration()
838{
839    int32_t maxJpegDimension, maxProcessedDimension;
840
841    maxJpegDimension = 0;
842    maxProcessedDimension = 0;
843
844    // Figure out maximum jpeg, processed, and raw dimensions
845    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
846        it != mStreamInfo.end(); it++) {
847
848        // Input stream doesn't have valid stream_type
849        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
850            continue;
851
852        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
853        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
854            if (dimension > maxJpegDimension)
855                maxJpegDimension = dimension;
856        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
857            if (dimension > maxProcessedDimension)
858                maxProcessedDimension = dimension;
859        }
860    }
861
862    //Assume all jpeg dimensions are in processed dimensions.
863    if (maxJpegDimension > maxProcessedDimension)
864        maxProcessedDimension = maxJpegDimension;
865
866    //Find minimum durations for processed, jpeg, and raw
867    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
868    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
869        if (maxProcessedDimension ==
870            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
871            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
872            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
873            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
874            break;
875        }
876    }
877}
878
879/*===========================================================================
880 * FUNCTION   : getMinFrameDuration
881 *
882 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
883 *              and current request configuration.
884 *
885 * PARAMETERS : @request: requset sent by the frameworks
886 *
887 * RETURN     : min farme duration for a particular request
888 *
889 *==========================================================================*/
890int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
891{
892    bool hasJpegStream = false;
893    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
894        const camera3_stream_t *stream = request->output_buffers[i].stream;
895        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
896            hasJpegStream = true;
897    }
898
899    if (!hasJpegStream)
900        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
901    else
902        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
903}
904
905/*===========================================================================
906 * FUNCTION   : registerStreamBuffers
907 *
908 * DESCRIPTION: Register buffers for a given stream with the HAL device.
909 *
910 * PARAMETERS :
911 *   @stream_list : streams to be configured
912 *
913 * RETURN     :
914 *
915 *==========================================================================*/
916int QCamera3HardwareInterface::registerStreamBuffers(
917        const camera3_stream_buffer_set_t *buffer_set)
918{
919    int rc = 0;
920
921    pthread_mutex_lock(&mMutex);
922
923    if (buffer_set == NULL) {
924        ALOGE("%s: Invalid buffer_set parameter.", __func__);
925        pthread_mutex_unlock(&mMutex);
926        return -EINVAL;
927    }
928    if (buffer_set->stream == NULL) {
929        ALOGE("%s: Invalid stream parameter.", __func__);
930        pthread_mutex_unlock(&mMutex);
931        return -EINVAL;
932    }
933    if (buffer_set->num_buffers < 1) {
934        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
935        pthread_mutex_unlock(&mMutex);
936        return -EINVAL;
937    }
938    if (buffer_set->buffers == NULL) {
939        ALOGE("%s: Invalid buffers parameter.", __func__);
940        pthread_mutex_unlock(&mMutex);
941        return -EINVAL;
942    }
943
944    camera3_stream_t *stream = buffer_set->stream;
945    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
946
947    //set the buffer_set in the mStreamInfo array
948    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
949            it != mStreamInfo.end(); it++) {
950        if ((*it)->stream == stream) {
951            uint32_t numBuffers = buffer_set->num_buffers;
952            (*it)->buffer_set.stream = buffer_set->stream;
953            (*it)->buffer_set.num_buffers = numBuffers;
954            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
955            if ((*it)->buffer_set.buffers == NULL) {
956                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
957                pthread_mutex_unlock(&mMutex);
958                return -ENOMEM;
959            }
960            for (size_t j = 0; j < numBuffers; j++){
961                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
962            }
963            (*it)->registered = 1;
964        }
965    }
966    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
967    if (rc < 0) {
968        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
969        pthread_mutex_unlock(&mMutex);
970        return -ENODEV;
971    }
972
973    pthread_mutex_unlock(&mMutex);
974    return NO_ERROR;
975}
976
977/*===========================================================================
978 * FUNCTION   : processCaptureRequest
979 *
980 * DESCRIPTION: process a capture request from camera service
981 *
982 * PARAMETERS :
983 *   @request : request from framework to process
984 *
985 * RETURN     :
986 *
987 *==========================================================================*/
988int QCamera3HardwareInterface::processCaptureRequest(
989                    camera3_capture_request_t *request)
990{
991    int rc = NO_ERROR;
992    int32_t request_id;
993    CameraMetadata meta;
994    MetadataBufferInfo reproc_meta;
995    int queueMetadata = 0;
996
997    pthread_mutex_lock(&mMutex);
998
999    rc = validateCaptureRequest(request);
1000    if (rc != NO_ERROR) {
1001        ALOGE("%s: incoming request is not valid", __func__);
1002        pthread_mutex_unlock(&mMutex);
1003        return rc;
1004    }
1005
1006    meta = request->settings;
1007
1008    // For first capture request, send capture intent, and
1009    // stream on all streams
1010    if (mFirstRequest) {
1011
1012        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1013            int32_t hal_version = CAM_HAL_V3;
1014            uint8_t captureIntent =
1015                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1016
1017            memset(mParameters, 0, sizeof(parm_buffer_t));
1018            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1019            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1020                sizeof(hal_version), &hal_version);
1021            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1022                sizeof(captureIntent), &captureIntent);
1023            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1024                mParameters);
1025        }
1026
1027        mMetadataChannel->start();
1028        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1029            it != mStreamInfo.end(); it++) {
1030            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1031            channel->start();
1032        }
1033    }
1034
1035    uint32_t frameNumber = request->frame_number;
1036    uint32_t streamTypeMask = 0;
1037
1038    if (meta.exists(ANDROID_REQUEST_ID)) {
1039        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1040        mCurrentRequestId = request_id;
1041        ALOGV("%s: Received request with id: %d",__func__, request_id);
1042    } else if (mFirstRequest || mCurrentRequestId == -1){
1043        ALOGE("%s: Unable to find request id field, \
1044                & no previous id available", __func__);
1045        return NAME_NOT_FOUND;
1046    } else {
1047        ALOGV("%s: Re-using old request id", __func__);
1048        request_id = mCurrentRequestId;
1049    }
1050
1051    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1052                                    __func__, __LINE__,
1053                                    request->num_output_buffers,
1054                                    request->input_buffer,
1055                                    frameNumber);
1056    // Acquire all request buffers first
1057    int blob_request = 0;
1058    for (size_t i = 0; i < request->num_output_buffers; i++) {
1059        const camera3_stream_buffer_t& output = request->output_buffers[i];
1060        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1061        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1062
1063        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1064        //Call function to store local copy of jpeg data for encode params.
1065            blob_request = 1;
1066            rc = getJpegSettings(request->settings);
1067            if (rc < 0) {
1068                ALOGE("%s: failed to get jpeg parameters", __func__);
1069                pthread_mutex_unlock(&mMutex);
1070                return rc;
1071            }
1072        }
1073
1074        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1075        if (rc != OK) {
1076            ALOGE("%s: fence wait failed %d", __func__, rc);
1077            pthread_mutex_unlock(&mMutex);
1078            return rc;
1079        }
1080        streamTypeMask |= channel->getStreamTypeMask();
1081    }
1082
1083    rc = setFrameParameters(request, streamTypeMask);
1084    if (rc < 0) {
1085        ALOGE("%s: fail to set frame parameters", __func__);
1086        pthread_mutex_unlock(&mMutex);
1087        return rc;
1088    }
1089
1090    /* Update pending request list and pending buffers map */
1091    PendingRequestInfo pendingRequest;
1092    pendingRequest.frame_number = frameNumber;
1093    pendingRequest.num_buffers = request->num_output_buffers;
1094    pendingRequest.request_id = request_id;
1095    pendingRequest.blob_request = blob_request;
1096    if (blob_request)
1097        pendingRequest.input_jpeg_settings = *mJpegSettings;
1098    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1099
1100    for (size_t i = 0; i < request->num_output_buffers; i++) {
1101        RequestedBufferInfo requestedBuf;
1102        requestedBuf.stream = request->output_buffers[i].stream;
1103        requestedBuf.buffer = NULL;
1104        pendingRequest.buffers.push_back(requestedBuf);
1105
1106        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1107    }
1108    mPendingRequestsList.push_back(pendingRequest);
1109
1110    // Notify metadata channel we receive a request
1111    mMetadataChannel->request(NULL, frameNumber);
1112
1113    // Call request on other streams
1114    for (size_t i = 0; i < request->num_output_buffers; i++) {
1115        const camera3_stream_buffer_t& output = request->output_buffers[i];
1116        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1117        mm_camera_buf_def_t *pInputBuffer = NULL;
1118
1119        if (channel == NULL) {
1120            ALOGE("%s: invalid channel pointer for stream", __func__);
1121            continue;
1122        }
1123
1124        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1125            QCamera3RegularChannel* inputChannel = NULL;
1126            if(request->input_buffer != NULL){
1127                //Try to get the internal format
1128                inputChannel = (QCamera3RegularChannel*)
1129                    request->input_buffer->stream->priv;
1130                if(inputChannel == NULL ){
1131                    ALOGE("%s: failed to get input channel handle", __func__);
1132                } else {
1133                    pInputBuffer =
1134                        inputChannel->getInternalFormatBuffer(
1135                                request->input_buffer->buffer);
1136                    ALOGD("%s: Input buffer dump",__func__);
1137                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1138                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1139                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1140                    ALOGD("Handle:%p", request->input_buffer->buffer);
1141                    //TODO: need to get corresponding metadata and send it to pproc
1142                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1143                         m != mStoredMetadataList.end(); m++) {
1144                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1145                            reproc_meta.meta_buf = m->meta_buf;
1146                            queueMetadata = 1;
1147                            break;
1148                        }
1149                    }
1150                }
1151            }
1152            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1153                            pInputBuffer,(QCamera3Channel*)inputChannel);
1154            if (queueMetadata) {
1155                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1156            }
1157        } else {
1158            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1159                __LINE__, output.buffer, frameNumber);
1160            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1161                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1162                     m != mStoredMetadataList.end(); m++) {
1163                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1164                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1165                            mMetadataChannel->bufDone(m->meta_buf);
1166                            free(m->meta_buf);
1167                            m = mStoredMetadataList.erase(m);
1168                            break;
1169                        }
1170                   }
1171                }
1172            }
1173            rc = channel->request(output.buffer, frameNumber);
1174        }
1175        if (rc < 0)
1176            ALOGE("%s: request failed", __func__);
1177    }
1178
1179    mFirstRequest = false;
1180    // Added a timed condition wait
1181    struct timespec ts;
1182    uint8_t isValidTimeout = 1;
1183    rc = clock_gettime(CLOCK_REALTIME, &ts);
1184    if (rc < 0) {
1185        isValidTimeout = 0;
1186        ALOGE("%s: Error reading the real time clock!!", __func__);
1187    }
1188    else {
1189        // Make timeout as 5 sec for request to be honored
1190        ts.tv_sec += 5;
1191    }
1192    //Block on conditional variable
1193    mPendingRequest = 1;
1194    while (mPendingRequest == 1) {
1195        if (!isValidTimeout) {
1196            ALOGV("%s: Blocking on conditional wait", __func__);
1197            pthread_cond_wait(&mRequestCond, &mMutex);
1198        }
1199        else {
1200            ALOGV("%s: Blocking on timed conditional wait", __func__);
1201            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1202            if (rc == ETIMEDOUT) {
1203                rc = -ENODEV;
1204                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1205                break;
1206            }
1207        }
1208        ALOGV("%s: Unblocked", __func__);
1209    }
1210
1211    pthread_mutex_unlock(&mMutex);
1212    return rc;
1213}
1214
1215/*===========================================================================
1216 * FUNCTION   : getMetadataVendorTagOps
1217 *
1218 * DESCRIPTION:
1219 *
1220 * PARAMETERS :
1221 *
1222 *
1223 * RETURN     :
1224 *==========================================================================*/
1225void QCamera3HardwareInterface::getMetadataVendorTagOps(
1226                    vendor_tag_query_ops_t* /*ops*/)
1227{
1228    /* Enable locks when we eventually add Vendor Tags */
1229    /*
1230    pthread_mutex_lock(&mMutex);
1231
1232    pthread_mutex_unlock(&mMutex);
1233    */
1234    return;
1235}
1236
1237/*===========================================================================
1238 * FUNCTION   : dump
1239 *
1240 * DESCRIPTION:
1241 *
1242 * PARAMETERS :
1243 *
1244 *
1245 * RETURN     :
1246 *==========================================================================*/
1247void QCamera3HardwareInterface::dump(int /*fd*/)
1248{
1249    /*Enable lock when we implement this function*/
1250    /*
1251    pthread_mutex_lock(&mMutex);
1252
1253    pthread_mutex_unlock(&mMutex);
1254    */
1255    return;
1256}
1257
1258/*===========================================================================
1259 * FUNCTION   : flush
1260 *
1261 * DESCRIPTION:
1262 *
1263 * PARAMETERS :
1264 *
1265 *
1266 * RETURN     :
1267 *==========================================================================*/
1268int QCamera3HardwareInterface::flush()
1269{
1270    /*Enable lock when we implement this function*/
1271    /*
1272    pthread_mutex_lock(&mMutex);
1273
1274    pthread_mutex_unlock(&mMutex);
1275    */
1276    return 0;
1277}
1278
1279/*===========================================================================
1280 * FUNCTION   : captureResultCb
1281 *
1282 * DESCRIPTION: Callback handler for all capture result
1283 *              (streams, as well as metadata)
1284 *
1285 * PARAMETERS :
1286 *   @metadata : metadata information
1287 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1288 *               NULL if metadata.
1289 *
1290 * RETURN     : NONE
1291 *==========================================================================*/
1292void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1293                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1294{
1295    pthread_mutex_lock(&mMutex);
1296
1297    if (metadata_buf) {
1298        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1299        int32_t frame_number_valid = *(int32_t *)
1300            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1301        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1302            CAM_INTF_META_PENDING_REQUESTS, metadata);
1303        uint32_t frame_number = *(uint32_t *)
1304            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1305        const struct timeval *tv = (const struct timeval *)
1306            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1307        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1308            tv->tv_usec * NSEC_PER_USEC;
1309
1310        if (!frame_number_valid) {
1311            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1312            mMetadataChannel->bufDone(metadata_buf);
1313            free(metadata_buf);
1314            goto done_metadata;
1315        }
1316        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1317                frame_number, capture_time);
1318
1319        // Go through the pending requests info and send shutter/results to frameworks
1320        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1321                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1322            camera3_capture_result_t result;
1323            camera3_notify_msg_t notify_msg;
1324            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1325
1326            // Flush out all entries with less or equal frame numbers.
1327
1328            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1329            //Right now it's the same as metadata timestamp
1330
1331            //TODO: When there is metadata drop, how do we derive the timestamp of
1332            //dropped frames? For now, we fake the dropped timestamp by substracting
1333            //from the reported timestamp
1334            nsecs_t current_capture_time = capture_time -
1335                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1336
1337            // Send shutter notify to frameworks
1338            notify_msg.type = CAMERA3_MSG_SHUTTER;
1339            notify_msg.message.shutter.frame_number = i->frame_number;
1340            notify_msg.message.shutter.timestamp = current_capture_time;
1341            mCallbackOps->notify(mCallbackOps, &notify_msg);
1342            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1343                    i->frame_number, capture_time);
1344
1345            // Send empty metadata with already filled buffers for dropped metadata
1346            // and send valid metadata with already filled buffers for current metadata
1347            if (i->frame_number < frame_number) {
1348                CameraMetadata dummyMetadata;
1349                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1350                        &current_capture_time, 1);
1351                dummyMetadata.update(ANDROID_REQUEST_ID,
1352                        &(i->request_id), 1);
1353                result.result = dummyMetadata.release();
1354            } else {
1355                result.result = translateCbMetadataToResultMetadata(metadata,
1356                        current_capture_time, i->request_id, i->blob_request,
1357                        &(i->input_jpeg_settings));
1358                if (mIsZslMode) {
1359                   int found_metadata = 0;
1360                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1361                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1362                        j != i->buffers.end(); j++) {
1363                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1364                         //check if corresp. zsl already exists in the stored metadata list
1365                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1366                               m != mStoredMetadataList.begin(); m++) {
1367                            if (m->frame_number == frame_number) {
1368                               m->meta_buf = metadata_buf;
1369                               found_metadata = 1;
1370                               break;
1371                            }
1372                         }
1373                         if (!found_metadata) {
1374                            MetadataBufferInfo store_meta_info;
1375                            store_meta_info.meta_buf = metadata_buf;
1376                            store_meta_info.frame_number = frame_number;
1377                            mStoredMetadataList.push_back(store_meta_info);
1378                            found_metadata = 1;
1379                         }
1380                      }
1381                   }
1382                   if (!found_metadata) {
1383                       if (!i->input_buffer_present && i->blob_request) {
1384                          //livesnapshot or fallback non-zsl snapshot case
1385                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1386                                j != i->buffers.end(); j++){
1387                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1388                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1389                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1390                                 break;
1391                              }
1392                         }
1393                       } else {
1394                            //return the metadata immediately
1395                            mMetadataChannel->bufDone(metadata_buf);
1396                            free(metadata_buf);
1397                       }
1398                   }
1399               } else if (!mIsZslMode && i->blob_request) {
1400                   //If it is a blob request then send the metadata to the picture channel
1401                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1402               } else {
1403                   // Return metadata buffer
1404                   mMetadataChannel->bufDone(metadata_buf);
1405                   free(metadata_buf);
1406               }
1407
1408            }
1409            if (!result.result) {
1410                ALOGE("%s: metadata is NULL", __func__);
1411            }
1412            result.frame_number = i->frame_number;
1413            result.num_output_buffers = 0;
1414            result.output_buffers = NULL;
1415            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1416                    j != i->buffers.end(); j++) {
1417                if (j->buffer) {
1418                    result.num_output_buffers++;
1419                }
1420            }
1421
1422            if (result.num_output_buffers > 0) {
1423                camera3_stream_buffer_t *result_buffers =
1424                    new camera3_stream_buffer_t[result.num_output_buffers];
1425                if (!result_buffers) {
1426                    ALOGE("%s: Fatal error: out of memory", __func__);
1427                }
1428                size_t result_buffers_idx = 0;
1429                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1430                        j != i->buffers.end(); j++) {
1431                    if (j->buffer) {
1432                        result_buffers[result_buffers_idx++] = *(j->buffer);
1433                        free(j->buffer);
1434                        j->buffer = NULL;
1435                        mPendingBuffersMap.editValueFor(j->stream)--;
1436                    }
1437                }
1438                result.output_buffers = result_buffers;
1439
1440                mCallbackOps->process_capture_result(mCallbackOps, &result);
1441                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1442                        __func__, result.frame_number, current_capture_time);
1443                free_camera_metadata((camera_metadata_t *)result.result);
1444                delete[] result_buffers;
1445            } else {
1446                mCallbackOps->process_capture_result(mCallbackOps, &result);
1447                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1448                        __func__, result.frame_number, current_capture_time);
1449                free_camera_metadata((camera_metadata_t *)result.result);
1450            }
1451            // erase the element from the list
1452            i = mPendingRequestsList.erase(i);
1453        }
1454
1455
1456done_metadata:
1457        bool max_buffers_dequeued = false;
1458        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1459            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1460            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1461            if (queued_buffers == stream->max_buffers) {
1462                max_buffers_dequeued = true;
1463                break;
1464            }
1465        }
1466        if (!max_buffers_dequeued && !pending_requests) {
1467            // Unblock process_capture_request
1468            mPendingRequest = 0;
1469            pthread_cond_signal(&mRequestCond);
1470        }
1471    } else {
1472        // If the frame number doesn't exist in the pending request list,
1473        // directly send the buffer to the frameworks, and update pending buffers map
1474        // Otherwise, book-keep the buffer.
1475        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1476        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1477            i++;
1478        }
1479        if (i == mPendingRequestsList.end()) {
1480            // Verify all pending requests frame_numbers are greater
1481            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1482                    j != mPendingRequestsList.end(); j++) {
1483                if (j->frame_number < frame_number) {
1484                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1485                            __func__, j->frame_number, frame_number);
1486                }
1487            }
1488            camera3_capture_result_t result;
1489            result.result = NULL;
1490            result.frame_number = frame_number;
1491            result.num_output_buffers = 1;
1492            result.output_buffers = buffer;
1493            ALOGV("%s: result frame_number = %d, buffer = %p",
1494                    __func__, frame_number, buffer);
1495            mPendingBuffersMap.editValueFor(buffer->stream)--;
1496            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1497                int found = 0;
1498                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1499                      k != mStoredMetadataList.end(); k++) {
1500                    if (k->frame_number == frame_number) {
1501                        k->zsl_buf_hdl = buffer->buffer;
1502                        found = 1;
1503                        break;
1504                    }
1505                }
1506                if (!found) {
1507                   MetadataBufferInfo meta_info;
1508                   meta_info.frame_number = frame_number;
1509                   meta_info.zsl_buf_hdl = buffer->buffer;
1510                   mStoredMetadataList.push_back(meta_info);
1511                }
1512            }
1513            mCallbackOps->process_capture_result(mCallbackOps, &result);
1514        } else {
1515            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1516                    j != i->buffers.end(); j++) {
1517                if (j->stream == buffer->stream) {
1518                    if (j->buffer != NULL) {
1519                        ALOGE("%s: Error: buffer is already set", __func__);
1520                    } else {
1521                        j->buffer = (camera3_stream_buffer_t *)malloc(
1522                                sizeof(camera3_stream_buffer_t));
1523                        *(j->buffer) = *buffer;
1524                        ALOGV("%s: cache buffer %p at result frame_number %d",
1525                                __func__, buffer, frame_number);
1526                    }
1527                }
1528            }
1529        }
1530    }
1531    pthread_mutex_unlock(&mMutex);
1532    return;
1533}
1534
1535/*===========================================================================
1536 * FUNCTION   : translateCbMetadataToResultMetadata
1537 *
1538 * DESCRIPTION:
1539 *
1540 * PARAMETERS :
1541 *   @metadata : metadata information from callback
1542 *
1543 * RETURN     : camera_metadata_t*
1544 *              metadata in a format specified by fwk
1545 *==========================================================================*/
1546camera_metadata_t*
1547QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1548                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1549                                 int32_t request_id, int32_t BlobRequest,
1550                                 jpeg_settings_t* inputjpegsettings)
1551{
1552    CameraMetadata camMetadata;
1553    camera_metadata_t* resultMetadata;
1554
1555    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1556    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1557
1558    // Update the JPEG related info
1559    if (BlobRequest) {
1560        camMetadata.update(ANDROID_JPEG_ORIENTATION, &(inputjpegsettings->jpeg_orientation), 1);
1561        camMetadata.update(ANDROID_JPEG_QUALITY, &(inputjpegsettings->jpeg_quality), 1);
1562
1563        int32_t thumbnailSizeTable[2];
1564        thumbnailSizeTable[0] = inputjpegsettings->thumbnail_size.width;
1565        thumbnailSizeTable[1] = inputjpegsettings->thumbnail_size.height;
1566        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSizeTable, 2);
1567        ALOGV("%s: Orien=%d, quality=%d wid=%d, height=%d", __func__, inputjpegsettings->jpeg_orientation,
1568               inputjpegsettings->jpeg_quality,thumbnailSizeTable[0], thumbnailSizeTable[1]);
1569
1570        if (inputjpegsettings->gps_coordinates[0]) {
1571            double gpsCoordinates[3];
1572            gpsCoordinates[0]=*(inputjpegsettings->gps_coordinates[0]);
1573            gpsCoordinates[1]=*(inputjpegsettings->gps_coordinates[1]);
1574            gpsCoordinates[2]=*(inputjpegsettings->gps_coordinates[2]);
1575            camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
1576            ALOGV("%s: gpsCoordinates[0]=%f, 1=%f 2=%f", __func__, gpsCoordinates[0],
1577                 gpsCoordinates[1],gpsCoordinates[2]);
1578        }
1579
1580        if (inputjpegsettings->gps_timestamp) {
1581            camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, inputjpegsettings->gps_timestamp, 1);
1582            ALOGV("%s: gps_timestamp=%lld", __func__, *(inputjpegsettings->gps_timestamp));
1583        }
1584
1585        String8 str(inputjpegsettings->gps_processing_method);
1586        if (strlen(mJpegSettings->gps_processing_method) > 0) {
1587            camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
1588        }
1589    }
1590    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1591    uint8_t next_entry;
1592    while (curr_entry != CAM_INTF_PARM_MAX) {
1593       ALOGV("%s: META_DEBUG: cur_entry is %d", __func__, curr_entry);
1594       switch (curr_entry) {
1595         case CAM_INTF_META_FACE_DETECTION:{
1596             cam_face_detection_data_t *faceDetectionInfo =
1597                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1598             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1599             int32_t faceIds[numFaces];
1600             uint8_t faceScores[numFaces];
1601             int32_t faceRectangles[numFaces * 4];
1602             int32_t faceLandmarks[numFaces * 6];
1603             int j = 0, k = 0;
1604             for (int i = 0; i < numFaces; i++) {
1605                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1606                 faceScores[i] = faceDetectionInfo->faces[i].score;
1607                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1608                         faceRectangles+j, -1);
1609                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1610                 j+= 4;
1611                 k+= 6;
1612             }
1613             if (numFaces > 0) {
1614                 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1615                 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1616                 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1617                     faceRectangles, numFaces*4);
1618                 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1619                     faceLandmarks, numFaces*6);
1620             }
1621            break;
1622            }
1623         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1624             uint8_t  *color_correct_mode =
1625                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1626             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1627             break;
1628          }
1629         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1630             int32_t  *ae_precapture_id =
1631                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1632             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1633             break;
1634          }
1635         case CAM_INTF_META_AEC_ROI: {
1636            cam_area_t  *hAeRegions =
1637                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1638             int32_t aeRegions[5];
1639             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1640             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1641             break;
1642          }
1643          case CAM_INTF_META_AEC_STATE:{
1644             uint8_t *ae_state =
1645                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1646             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1647             break;
1648          }
1649          case CAM_INTF_PARM_FOCUS_MODE:{
1650             uint8_t  *focusMode =
1651                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1652             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1653                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1654             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1655             break;
1656          }
1657          case CAM_INTF_META_AF_ROI:{
1658             /*af regions*/
1659             cam_area_t  *hAfRegions =
1660                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1661             int32_t afRegions[5];
1662             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1663             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1664             break;
1665          }
1666          case CAM_INTF_META_AF_STATE: {
1667             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1668             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1669             break;
1670          }
1671          case CAM_INTF_META_AF_TRIGGER_ID: {
1672             int32_t  *afTriggerId =
1673                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1674             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1675             break;
1676          }
1677          case CAM_INTF_PARM_WHITE_BALANCE: {
1678               uint8_t  *whiteBalance =
1679                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1680               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1681                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1682                   *whiteBalance);
1683               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1684               break;
1685          }
1686          case CAM_INTF_META_AWB_REGIONS: {
1687             /*awb regions*/
1688             cam_area_t  *hAwbRegions =
1689                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1690             int32_t awbRegions[5];
1691             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1692             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1693             break;
1694          }
1695          case CAM_INTF_META_AWB_STATE: {
1696             uint8_t  *whiteBalanceState =
1697                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1698             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1699             break;
1700          }
1701          case CAM_INTF_META_MODE: {
1702             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1703             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1704             break;
1705          }
1706          case CAM_INTF_META_EDGE_MODE: {
1707             uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1708             camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1709             break;
1710          }
1711          case CAM_INTF_META_FLASH_POWER: {
1712             uint8_t  *flashPower =
1713                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1714             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1715             break;
1716          }
1717          case CAM_INTF_META_FLASH_FIRING_TIME: {
1718             int64_t  *flashFiringTime =
1719                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1720             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1721             break;
1722          }
1723          case CAM_INTF_META_FLASH_STATE: {
1724             uint8_t  *flashState =
1725                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1726             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1727             break;
1728          }
1729          case CAM_INTF_META_FLASH_MODE:{
1730             uint8_t *flashMode = (uint8_t*)
1731                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
1732             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
1733             break;
1734          }
1735          case CAM_INTF_META_HOTPIXEL_MODE: {
1736              uint8_t  *hotPixelMode =
1737                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1738              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1739              break;
1740          }
1741          case CAM_INTF_META_LENS_APERTURE:{
1742             float  *lensAperture =
1743                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1744             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1745             break;
1746          }
1747          case CAM_INTF_META_LENS_FILTERDENSITY: {
1748             float  *filterDensity =
1749                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1750             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1751             break;
1752          }
1753          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1754             float  *focalLength =
1755                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1756             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1757             break;
1758          }
1759          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1760             float  *focusDistance =
1761                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1762             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1763             break;
1764          }
1765          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1766             float  *focusRange =
1767                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1768             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
1769          }
1770          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1771             uint8_t  *opticalStab =
1772                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1773             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1774          }
1775          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1776             uint8_t  *noiseRedMode =
1777                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1778             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1779             break;
1780          }
1781          case CAM_INTF_META_SCALER_CROP_REGION: {
1782             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1783             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1784             int32_t scalerCropRegion[4];
1785             scalerCropRegion[0] = hScalerCropRegion->left;
1786             scalerCropRegion[1] = hScalerCropRegion->top;
1787             scalerCropRegion[2] = hScalerCropRegion->width;
1788             scalerCropRegion[3] = hScalerCropRegion->height;
1789             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1790             break;
1791          }
1792          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1793             int64_t  *sensorExpTime =
1794                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1795             mMetadataResponse.exposure_time = *sensorExpTime;
1796             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1797             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1798             break;
1799          }
1800          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1801             int64_t  *sensorFameDuration =
1802                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1803             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1804             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1805             break;
1806          }
1807          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1808             int32_t  *sensorSensitivity =
1809                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1810             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1811             mMetadataResponse.iso_speed = *sensorSensitivity;
1812             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1813             break;
1814          }
1815          case CAM_INTF_META_SHADING_MODE: {
1816             uint8_t  *shadingMode =
1817                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1818             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1819             break;
1820          }
1821          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1822             uint8_t  *faceDetectMode =
1823                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1824             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1825                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1826                                                        *faceDetectMode);
1827             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1828             break;
1829          }
1830          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1831             uint8_t  *histogramMode =
1832                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1833             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1834             break;
1835          }
1836          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1837               uint8_t  *sharpnessMapMode =
1838                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1839               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1840                                  sharpnessMapMode, 1);
1841               break;
1842           }
1843          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1844               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1845               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1846               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1847                                  (int32_t*)sharpnessMap->sharpness,
1848                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1849               break;
1850          }
1851          case CAM_INTF_META_LENS_SHADING_MAP: {
1852               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1853               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1854               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1855               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1856               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1857                                  (float*)lensShadingMap->lens_shading,
1858                                  4*map_width*map_height);
1859               break;
1860          }
1861          case CAM_INTF_META_TONEMAP_CURVES:{
1862             //Populate CAM_INTF_META_TONEMAP_CURVES
1863             /* ch0 = G, ch 1 = B, ch 2 = R*/
1864             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1865             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1866             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1867                                (float*)tonemap->curves[0].tonemap_points,
1868                                tonemap->tonemap_points_cnt * 2);
1869
1870             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1871                                (float*)tonemap->curves[1].tonemap_points,
1872                                tonemap->tonemap_points_cnt * 2);
1873
1874             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1875                                (float*)tonemap->curves[2].tonemap_points,
1876                                tonemap->tonemap_points_cnt * 2);
1877             break;
1878          }
1879          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1880             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1881             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1882             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1883             break;
1884          }
1885          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1886              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1887              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1888              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1889                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1890              break;
1891          }
1892          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1893             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1894             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1895             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1896                       predColorCorrectionGains->gains, 4);
1897             break;
1898          }
1899          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1900             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1901                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1902             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1903                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1904             break;
1905
1906          }
1907          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
1908             uint8_t *blackLevelLock = (uint8_t*)
1909               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1910             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1911             break;
1912          }
1913          case CAM_INTF_META_SCENE_FLICKER:{
1914             uint8_t *sceneFlicker = (uint8_t*)
1915             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1916             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1917             break;
1918          }
1919          case CAM_INTF_PARM_LED_MODE:
1920             break;
1921          default:
1922             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
1923                   __func__, curr_entry);
1924             break;
1925       }
1926       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
1927       curr_entry = next_entry;
1928    }
1929    resultMetadata = camMetadata.release();
1930    return resultMetadata;
1931}
1932
1933/*===========================================================================
1934 * FUNCTION   : convertToRegions
1935 *
1936 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1937 *
1938 * PARAMETERS :
1939 *   @rect   : cam_rect_t struct to convert
1940 *   @region : int32_t destination array
1941 *   @weight : if we are converting from cam_area_t, weight is valid
1942 *             else weight = -1
1943 *
1944 *==========================================================================*/
1945void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1946    region[0] = rect.left;
1947    region[1] = rect.top;
1948    region[2] = rect.left + rect.width;
1949    region[3] = rect.top + rect.height;
1950    if (weight > -1) {
1951        region[4] = weight;
1952    }
1953}
1954
1955/*===========================================================================
1956 * FUNCTION   : convertFromRegions
1957 *
1958 * DESCRIPTION: helper method to convert from array to cam_rect_t
1959 *
1960 * PARAMETERS :
1961 *   @rect   : cam_rect_t struct to convert
1962 *   @region : int32_t destination array
1963 *   @weight : if we are converting from cam_area_t, weight is valid
1964 *             else weight = -1
1965 *
1966 *==========================================================================*/
1967void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1968                                                   const camera_metadata_t *settings,
1969                                                   uint32_t tag){
1970    CameraMetadata frame_settings;
1971    frame_settings = settings;
1972    int32_t x_min = frame_settings.find(tag).data.i32[0];
1973    int32_t y_min = frame_settings.find(tag).data.i32[1];
1974    int32_t x_max = frame_settings.find(tag).data.i32[2];
1975    int32_t y_max = frame_settings.find(tag).data.i32[3];
1976    roi->weight = frame_settings.find(tag).data.i32[4];
1977    roi->rect.left = x_min;
1978    roi->rect.top = y_min;
1979    roi->rect.width = x_max - x_min;
1980    roi->rect.height = y_max - y_min;
1981}
1982
1983/*===========================================================================
1984 * FUNCTION   : resetIfNeededROI
1985 *
1986 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1987 *              crop region
1988 *
1989 * PARAMETERS :
1990 *   @roi       : cam_area_t struct to resize
1991 *   @scalerCropRegion : cam_crop_region_t region to compare against
1992 *
1993 *
1994 *==========================================================================*/
1995bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1996                                                 const cam_crop_region_t* scalerCropRegion)
1997{
1998    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1999    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2000    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
2001    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
2002    if ((roi_x_max < scalerCropRegion->left) ||
2003        (roi_y_max < scalerCropRegion->top)  ||
2004        (roi->rect.left > crop_x_max) ||
2005        (roi->rect.top > crop_y_max)){
2006        return false;
2007    }
2008    if (roi->rect.left < scalerCropRegion->left) {
2009        roi->rect.left = scalerCropRegion->left;
2010    }
2011    if (roi->rect.top < scalerCropRegion->top) {
2012        roi->rect.top = scalerCropRegion->top;
2013    }
2014    if (roi_x_max > crop_x_max) {
2015        roi_x_max = crop_x_max;
2016    }
2017    if (roi_y_max > crop_y_max) {
2018        roi_y_max = crop_y_max;
2019    }
2020    roi->rect.width = roi_x_max - roi->rect.left;
2021    roi->rect.height = roi_y_max - roi->rect.top;
2022    return true;
2023}
2024
2025/*===========================================================================
2026 * FUNCTION   : convertLandmarks
2027 *
2028 * DESCRIPTION: helper method to extract the landmarks from face detection info
2029 *
2030 * PARAMETERS :
2031 *   @face   : cam_rect_t struct to convert
2032 *   @landmarks : int32_t destination array
2033 *
2034 *
2035 *==========================================================================*/
2036void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2037{
2038    landmarks[0] = face.left_eye_center.x;
2039    landmarks[1] = face.left_eye_center.y;
2040    landmarks[2] = face.right_eye_center.y;
2041    landmarks[3] = face.right_eye_center.y;
2042    landmarks[4] = face.mouth_center.x;
2043    landmarks[5] = face.mouth_center.y;
2044}
2045
2046#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2047/*===========================================================================
2048 * FUNCTION   : initCapabilities
2049 *
2050 * DESCRIPTION: initialize camera capabilities in static data struct
2051 *
2052 * PARAMETERS :
2053 *   @cameraId  : camera Id
2054 *
2055 * RETURN     : int32_t type of status
2056 *              NO_ERROR  -- success
2057 *              none-zero failure code
2058 *==========================================================================*/
2059int QCamera3HardwareInterface::initCapabilities(int cameraId)
2060{
2061    int rc = 0;
2062    mm_camera_vtbl_t *cameraHandle = NULL;
2063    QCamera3HeapMemory *capabilityHeap = NULL;
2064
2065    cameraHandle = camera_open(cameraId);
2066    if (!cameraHandle) {
2067        ALOGE("%s: camera_open failed", __func__);
2068        rc = -1;
2069        goto open_failed;
2070    }
2071
2072    capabilityHeap = new QCamera3HeapMemory();
2073    if (capabilityHeap == NULL) {
2074        ALOGE("%s: creation of capabilityHeap failed", __func__);
2075        goto heap_creation_failed;
2076    }
2077    /* Allocate memory for capability buffer */
2078    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2079    if(rc != OK) {
2080        ALOGE("%s: No memory for cappability", __func__);
2081        goto allocate_failed;
2082    }
2083
2084    /* Map memory for capability buffer */
2085    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2086    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2087                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2088                                capabilityHeap->getFd(0),
2089                                sizeof(cam_capability_t));
2090    if(rc < 0) {
2091        ALOGE("%s: failed to map capability buffer", __func__);
2092        goto map_failed;
2093    }
2094
2095    /* Query Capability */
2096    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2097    if(rc < 0) {
2098        ALOGE("%s: failed to query capability",__func__);
2099        goto query_failed;
2100    }
2101    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2102    if (!gCamCapability[cameraId]) {
2103        ALOGE("%s: out of memory", __func__);
2104        goto query_failed;
2105    }
2106    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2107                                        sizeof(cam_capability_t));
2108    rc = 0;
2109
2110query_failed:
2111    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2112                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2113map_failed:
2114    capabilityHeap->deallocate();
2115allocate_failed:
2116    delete capabilityHeap;
2117heap_creation_failed:
2118    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2119    cameraHandle = NULL;
2120open_failed:
2121    return rc;
2122}
2123
2124/*===========================================================================
2125 * FUNCTION   : initParameters
2126 *
2127 * DESCRIPTION: initialize camera parameters
2128 *
2129 * PARAMETERS :
2130 *
2131 * RETURN     : int32_t type of status
2132 *              NO_ERROR  -- success
2133 *              none-zero failure code
2134 *==========================================================================*/
2135int QCamera3HardwareInterface::initParameters()
2136{
2137    int rc = 0;
2138
2139    //Allocate Set Param Buffer
2140    mParamHeap = new QCamera3HeapMemory();
2141    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2142    if(rc != OK) {
2143        rc = NO_MEMORY;
2144        ALOGE("Failed to allocate SETPARM Heap memory");
2145        delete mParamHeap;
2146        mParamHeap = NULL;
2147        return rc;
2148    }
2149
2150    //Map memory for parameters buffer
2151    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2152            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2153            mParamHeap->getFd(0),
2154            sizeof(parm_buffer_t));
2155    if(rc < 0) {
2156        ALOGE("%s:failed to map SETPARM buffer",__func__);
2157        rc = FAILED_TRANSACTION;
2158        mParamHeap->deallocate();
2159        delete mParamHeap;
2160        mParamHeap = NULL;
2161        return rc;
2162    }
2163
2164    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2165    return rc;
2166}
2167
2168/*===========================================================================
2169 * FUNCTION   : deinitParameters
2170 *
2171 * DESCRIPTION: de-initialize camera parameters
2172 *
2173 * PARAMETERS :
2174 *
2175 * RETURN     : NONE
2176 *==========================================================================*/
2177void QCamera3HardwareInterface::deinitParameters()
2178{
2179    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2180            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2181
2182    mParamHeap->deallocate();
2183    delete mParamHeap;
2184    mParamHeap = NULL;
2185
2186    mParameters = NULL;
2187}
2188
2189/*===========================================================================
2190 * FUNCTION   : calcMaxJpegSize
2191 *
2192 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2193 *
2194 * PARAMETERS :
2195 *
2196 * RETURN     : max_jpeg_size
2197 *==========================================================================*/
2198int QCamera3HardwareInterface::calcMaxJpegSize()
2199{
2200    int32_t max_jpeg_size = 0;
2201    int temp_width, temp_height;
2202    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2203        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2204        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2205        if (temp_width * temp_height > max_jpeg_size ) {
2206            max_jpeg_size = temp_width * temp_height;
2207        }
2208    }
2209    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2210    return max_jpeg_size;
2211}
2212
2213/*===========================================================================
2214 * FUNCTION   : initStaticMetadata
2215 *
2216 * DESCRIPTION: initialize the static metadata
2217 *
2218 * PARAMETERS :
2219 *   @cameraId  : camera Id
2220 *
2221 * RETURN     : int32_t type of status
2222 *              0  -- success
2223 *              non-zero failure code
2224 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    int rc = 0;
    // staticInfo accumulates all static tags; it is released into the
    // process-wide gStaticMetadata[cameraId] at the end of this function.
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1); */

    /*hard coded for now but this should come from sensor*/
    // Back camera advertises a 10-diopter minimum focus distance; front
    // camera advertises 0 (fixed focus).
    float min_focus_distance;
    if(facingBack){
        min_focus_distance = 10;
    } else {
        min_focus_distance = 0;
    }
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &min_focus_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      &gCamCapability[cameraId]->focal_length,
                      1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                      gCamCapability[cameraId]->optical_stab_modes_count);

    staticInfo.update(ANDROID_LENS_POSITION,
                      gCamCapability[cameraId]->lens_position,
                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));

    // Map/array sizes below are published as flat {width, height} pairs.
    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
            geo_correction_map_size,
            sizeof(geo_correction_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
                       gCamCapability[cameraId]->geo_correction_map,
                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));

    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);

    camera_metadata_rational baseGainFactor = {
            gCamCapability[cameraId]->base_gain_factor.numerator,
            gCamCapability[cameraId]->base_gain_factor.denominator};
    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
                      &baseGainFactor, 1);

    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                               gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, 2);

    // Active array is published as {xmin, ymin, width, height}; the origin
    // is hard-coded to (0, 0) here.
    int32_t active_array_size[] = {0, 0,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                                gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);


    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
                      &gCamCapability[cameraId]->raw_min_duration,
                       1);

    // Only flexible YUV and JPEG blob output formats are advertised.
    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
                                                HAL_PIXEL_FORMAT_BLOB};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    // Flatten the backend picture-size table into {w, h} pairs; reused
    // below for ANDROID_SCALER_AVAILABLE_JPEG_SIZES.
    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    // Video stabilization is not wired up yet; only OFF is reported.
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /*HAL 1 and HAL 3 common*/
    // Hard-coded 4x digital zoom ceiling.
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    int32_t max3aRegions = 1;
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            &max3aRegions, 1);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                                        gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    // NOTE(review): this loop duplicates calcMaxJpegSize() (which uses the
    // member mCameraId rather than the cameraId parameter) — candidate for
    // consolidation.  Worst-case JPEG size = largest area * 3/2 bytes/px
    // plus the trailing camera3_jpeg_blob_t header.
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // For each backend capability list below: translate each backend enum to
    // the framework enum via lookupFwkName(), silently dropping entries the
    // framework does not know about.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    int32_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP,
                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // supported_indexes remembers which backend table slot each advertised
    // scene mode came from, so makeOverridesList() can fetch its overrides.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // Three override entries (AE, AWB, AF) per advertised scene mode.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                 gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                    gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    // Flash firing levels are passed through unmapped (backend values).
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);


    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // NOTE(review): avail_ae_modes holds 5 entries; this assumes
    // supported_ae_modes_cnt <= 2 whenever flash is available (3 flash modes
    // are appended).  Verify the backend guarantees that bound.
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // {raw, processed, jpeg} maximum concurrent output stream counts.
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    // Transfer ownership of the assembled metadata to the global cache.
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}
2584
2585/*===========================================================================
2586 * FUNCTION   : makeTable
2587 *
2588 * DESCRIPTION: make a table of sizes
2589 *
2590 * PARAMETERS :
2591 *
2592 *
2593 *==========================================================================*/
2594void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2595                                          int32_t* sizeTable)
2596{
2597    int j = 0;
2598    for (int i = 0; i < size; i++) {
2599        sizeTable[j] = dimTable[i].width;
2600        sizeTable[j+1] = dimTable[i].height;
2601        j+=2;
2602    }
2603}
2604
2605/*===========================================================================
2606 * FUNCTION   : makeFPSTable
2607 *
2608 * DESCRIPTION: make a table of fps ranges
2609 *
2610 * PARAMETERS :
2611 *
2612 *==========================================================================*/
2613void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2614                                          int32_t* fpsRangesTable)
2615{
2616    int j = 0;
2617    for (int i = 0; i < size; i++) {
2618        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2619        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2620        j+=2;
2621    }
2622}
2623
2624/*===========================================================================
2625 * FUNCTION   : makeOverridesList
2626 *
2627 * DESCRIPTION: make a list of scene mode overrides
2628 *
2629 * PARAMETERS :
2630 *
2631 *
2632 *==========================================================================*/
2633void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2634                                                  uint8_t size, uint8_t* overridesList,
2635                                                  uint8_t* supported_indexes,
2636                                                  int camera_id)
2637{
2638    /*daemon will give a list of overrides for all scene modes.
2639      However we should send the fwk only the overrides for the scene modes
2640      supported by the framework*/
2641    int j = 0, index = 0, supt = 0;
2642    uint8_t focus_override;
2643    for (int i = 0; i < size; i++) {
2644        supt = 0;
2645        index = supported_indexes[i];
2646        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2647        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2648                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2649                                                    overridesTable[index].awb_mode);
2650        focus_override = (uint8_t)overridesTable[index].af_mode;
2651        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2652           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2653              supt = 1;
2654              break;
2655           }
2656        }
2657        if (supt) {
2658           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2659                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2660                                              focus_override);
2661        } else {
2662           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2663        }
2664        j+=3;
2665    }
2666}
2667
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the backend format to a format recognized by framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
 *
 *==========================================================================*/
2678int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2679{
2680    int32_t halPixelFormat;
2681
2682    switch (format) {
2683    case CAM_FORMAT_YUV_420_NV12:
2684        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2685        break;
2686    case CAM_FORMAT_YUV_420_NV21:
2687        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2688        break;
2689    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2690        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2691        break;
2692    case CAM_FORMAT_YUV_420_YV12:
2693        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2694        break;
2695    case CAM_FORMAT_YUV_422_NV16:
2696    case CAM_FORMAT_YUV_422_NV61:
2697    default:
2698        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2699        break;
2700    }
2701    return halPixelFormat;
2702}
2703
2704/*===========================================================================
2705 * FUNCTION   : getSensorSensitivity
2706 *
2707 * DESCRIPTION: convert iso_mode to an integer value
2708 *
2709 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2710 *
2711 ** RETURN    : sensitivity supported by sensor
2712 *
2713 *==========================================================================*/
2714int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2715{
2716    int32_t sensitivity;
2717
2718    switch (iso_mode) {
2719    case CAM_ISO_MODE_100:
2720        sensitivity = 100;
2721        break;
2722    case CAM_ISO_MODE_200:
2723        sensitivity = 200;
2724        break;
2725    case CAM_ISO_MODE_400:
2726        sensitivity = 400;
2727        break;
2728    case CAM_ISO_MODE_800:
2729        sensitivity = 800;
2730        break;
2731    case CAM_ISO_MODE_1600:
2732        sensitivity = 1600;
2733        break;
2734    default:
2735        sensitivity = -1;
2736        break;
2737    }
2738    return sensitivity;
2739}
2740
2741
2742/*===========================================================================
2743 * FUNCTION   : AddSetParmEntryToBatch
2744 *
2745 * DESCRIPTION: add set parameter entry into batch
2746 *
2747 * PARAMETERS :
2748 *   @p_table     : ptr to parameter buffer
2749 *   @paramType   : parameter type
2750 *   @paramLength : length of parameter value
2751 *   @paramValue  : ptr to parameter value
2752 *
2753 * RETURN     : int32_t type of status
2754 *              NO_ERROR  -- success
 *              non-zero failure code
2756 *==========================================================================*/
2757int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2758                                                          cam_intf_parm_type_t paramType,
2759                                                          uint32_t paramLength,
2760                                                          void *paramValue)
2761{
2762    int position = paramType;
2763    int current, next;
2764
2765    /*************************************************************************
2766    *                 Code to take care of linking next flags                *
2767    *************************************************************************/
2768    current = GET_FIRST_PARAM_ID(p_table);
2769    if (position == current){
2770        //DO NOTHING
2771    } else if (position < current){
2772        SET_NEXT_PARAM_ID(position, p_table, current);
2773        SET_FIRST_PARAM_ID(p_table, position);
2774    } else {
2775        /* Search for the position in the linked list where we need to slot in*/
2776        while (position > GET_NEXT_PARAM_ID(current, p_table))
2777            current = GET_NEXT_PARAM_ID(current, p_table);
2778
2779        /*If node already exists no need to alter linking*/
2780        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2781            next = GET_NEXT_PARAM_ID(current, p_table);
2782            SET_NEXT_PARAM_ID(current, p_table, position);
2783            SET_NEXT_PARAM_ID(position, p_table, next);
2784        }
2785    }
2786
2787    /*************************************************************************
2788    *                   Copy contents into entry                             *
2789    *************************************************************************/
2790
2791    if (paramLength > sizeof(parm_type_t)) {
2792        ALOGE("%s:Size of input larger than max entry size",__func__);
2793        return BAD_VALUE;
2794    }
2795    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2796    return NO_ERROR;
2797}
2798
/*===========================================================================
 * FUNCTION   : lookupFwkName
 *
 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @hal_name : name of the hal_parm to map
 *
 * RETURN     : int type of status
 *              fwk_name        -- success
 *              NAME_NOT_FOUND  -- no framework equivalent exists
 *==========================================================================*/
2814int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2815                                             int len, int hal_name)
2816{
2817
2818    for (int i = 0; i < len; i++) {
2819        if (arr[i].hal_name == hal_name)
2820            return arr[i].fwk_name;
2821    }
2822
2823    /* Not able to find matching framework type is not necessarily
2824     * an error case. This happens when mm-camera supports more attributes
2825     * than the frameworks do */
2826    ALOGD("%s: Cannot find matching framework type", __func__);
2827    return NAME_NOT_FOUND;
2828}
2829
/*===========================================================================
 * FUNCTION   : lookupHalName
 *
 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @fwk_name : name of the fwk_parm to map
 *
 * RETURN     : int32_t type of status
 *              hal_name        -- success
 *              NAME_NOT_FOUND  -- no HAL equivalent exists
 *==========================================================================*/
2845int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2846                                             int len, int fwk_name)
2847{
2848    for (int i = 0; i < len; i++) {
2849       if (arr[i].fwk_name == fwk_name)
2850           return arr[i].hal_name;
2851    }
2852    ALOGE("%s: Cannot find matching hal type", __func__);
2853    return NAME_NOT_FOUND;
2854}
2855
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
2869int QCamera3HardwareInterface::getCamInfo(int cameraId,
2870                                    struct camera_info *info)
2871{
2872    int rc = 0;
2873
2874    if (NULL == gCamCapability[cameraId]) {
2875        rc = initCapabilities(cameraId);
2876        if (rc < 0) {
2877            //pthread_mutex_unlock(&g_camlock);
2878            return rc;
2879        }
2880    }
2881
2882    if (NULL == gStaticMetadata[cameraId]) {
2883        rc = initStaticMetadata(cameraId);
2884        if (rc < 0) {
2885            return rc;
2886        }
2887    }
2888
2889    switch(gCamCapability[cameraId]->position) {
2890    case CAM_POSITION_BACK:
2891        info->facing = CAMERA_FACING_BACK;
2892        break;
2893
2894    case CAM_POSITION_FRONT:
2895        info->facing = CAMERA_FACING_FRONT;
2896        break;
2897
2898    default:
2899        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2900        rc = -1;
2901        break;
2902    }
2903
2904
2905    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2906    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2907    info->static_camera_characteristics = gStaticMetadata[cameraId];
2908
2909    return rc;
2910}
2911
2912/*===========================================================================
2913 * FUNCTION   : translateCapabilityToMetadata
2914 *
2915 * DESCRIPTION: translate the metadata into camera_metadata_t
2916 *
2917 * PARAMETERS : type of the request
2918 *
2919 *
2920 * RETURN     : success: camera_metadata_t*
2921 *              failure: NULL
2922 *
2923 *==========================================================================*/
2924camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2925{
2926    pthread_mutex_lock(&mMutex);
2927
2928    if (mDefaultMetadata[type] != NULL) {
2929        pthread_mutex_unlock(&mMutex);
2930        return mDefaultMetadata[type];
2931    }
2932    //first time we are handling this request
2933    //fill up the metadata structure using the wrapper class
2934    CameraMetadata settings;
2935    //translate from cam_capability_t to camera_metadata_tag_t
2936    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2937    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2938    int32_t defaultRequestID = 0;
2939    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
2940
2941    /*control*/
2942
2943    uint8_t controlIntent = 0;
2944    switch (type) {
2945      case CAMERA3_TEMPLATE_PREVIEW:
2946        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2947        break;
2948      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2949        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2950        break;
2951      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2952        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2953        break;
2954      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2955        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2956        break;
2957      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2958        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2959        break;
2960      default:
2961        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2962        break;
2963    }
2964    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2965
2966    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2967            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2968
2969    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2970    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2971
2972    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2973    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2974
2975    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2976    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2977
2978    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2979    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2980
2981    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2982    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2983
2984    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2985    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2986
2987    static uint8_t focusMode;
2988    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2989        ALOGE("%s: Setting focus mode to auto", __func__);
2990        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2991    } else {
2992        ALOGE("%s: Setting focus mode to off", __func__);
2993        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2994    }
2995    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2996
2997    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2998    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2999
3000    /*flash*/
3001    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3002    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3003
3004    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3005    settings.update(ANDROID_FLASH_FIRING_POWER,
3006            &flashFiringLevel, 1);
3007
3008    /* lens */
3009    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3010    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3011
3012    if (gCamCapability[mCameraId]->filter_densities_count) {
3013        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3014        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3015                        gCamCapability[mCameraId]->filter_densities_count);
3016    }
3017
3018    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3019    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3020
3021    /* Exposure time(Update the Min Exposure Time)*/
3022    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
3023    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
3024
3025    /* frame duration */
3026    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
3027    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
3028
3029    /* sensitivity */
3030    static const int32_t default_sensitivity = 100;
3031    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
3032
3033    /*edge mode*/
3034    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
3035    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
3036
3037    /*noise reduction mode*/
3038    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3039    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3040
3041    /*color correction mode*/
3042    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3043    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3044
3045    /*transform matrix mode*/
3046    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3047    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3048
3049    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3050    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3051
3052    mDefaultMetadata[type] = settings.release();
3053
3054    pthread_mutex_unlock(&mMutex);
3055    return mDefaultMetadata[type];
3056}
3057
3058/*===========================================================================
3059 * FUNCTION   : setFrameParameters
3060 *
3061 * DESCRIPTION: set parameters per frame as requested in the metadata from
3062 *              framework
3063 *
3064 * PARAMETERS :
3065 *   @request   : request that needs to be serviced
3066 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3067 *
3068 * RETURN     : success: NO_ERROR
3069 *              failure:
3070 *==========================================================================*/
3071int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3072                    uint32_t streamTypeMask)
3073{
3074    /*translate from camera_metadata_t type to parm_type_t*/
3075    int rc = 0;
3076    if (request->settings == NULL && mFirstRequest) {
3077        /*settings cannot be null for the first request*/
3078        return BAD_VALUE;
3079    }
3080
3081    int32_t hal_version = CAM_HAL_V3;
3082
3083    memset(mParameters, 0, sizeof(parm_buffer_t));
3084    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3085    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3086                sizeof(hal_version), &hal_version);
3087    if (rc < 0) {
3088        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3089        return BAD_VALUE;
3090    }
3091
3092    /*we need to update the frame number in the parameters*/
3093    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3094                                sizeof(request->frame_number), &(request->frame_number));
3095    if (rc < 0) {
3096        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3097        return BAD_VALUE;
3098    }
3099
3100    /* Update stream id mask where buffers are requested */
3101    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3102                                sizeof(streamTypeMask), &streamTypeMask);
3103    if (rc < 0) {
3104        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3105        return BAD_VALUE;
3106    }
3107
3108    if(request->settings != NULL){
3109        rc = translateMetadataToParameters(request);
3110    }
3111    /*set the parameters to backend*/
3112    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3113    return rc;
3114}
3115
3116/*===========================================================================
3117 * FUNCTION   : translateMetadataToParameters
3118 *
3119 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3120 *
3121 *
3122 * PARAMETERS :
3123 *   @request  : request sent from framework
3124 *
3125 *
3126 * RETURN     : success: NO_ERROR
3127 *              failure:
3128 *==========================================================================*/
3129int QCamera3HardwareInterface::translateMetadataToParameters
3130                                  (const camera3_capture_request_t *request)
3131{
3132    int rc = 0;
3133    CameraMetadata frame_settings;
3134    frame_settings = request->settings;
3135
3136    /* Do not change the order of the following list unless you know what you are
3137     * doing.
3138     * The order is laid out in such a way that parameters in the front of the table
3139     * may be used to override the parameters later in the table. Examples are:
3140     * 1. META_MODE should precede AEC/AWB/AF MODE
3141     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3142     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3143     * 4. Any mode should precede it's corresponding settings
3144     */
3145    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3146        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3147        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3148                sizeof(metaMode), &metaMode);
3149        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3150           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3151           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3152                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3153                                             fwk_sceneMode);
3154           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3155                sizeof(sceneMode), &sceneMode);
3156        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3157           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3158           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3159                sizeof(sceneMode), &sceneMode);
3160        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3161           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3162           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3163                sizeof(sceneMode), &sceneMode);
3164        }
3165    }
3166
3167    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3168        uint8_t fwk_aeMode =
3169            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3170        uint8_t aeMode;
3171        int32_t redeye;
3172
3173        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3174            aeMode = CAM_AE_MODE_OFF;
3175        } else {
3176            aeMode = CAM_AE_MODE_ON;
3177        }
3178        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3179            redeye = 1;
3180        } else {
3181            redeye = 0;
3182        }
3183
3184        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3185                                          sizeof(AE_FLASH_MODE_MAP),
3186                                          fwk_aeMode);
3187        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3188                sizeof(aeMode), &aeMode);
3189        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3190                sizeof(flashMode), &flashMode);
3191        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3192                sizeof(redeye), &redeye);
3193    }
3194
3195    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3196        uint8_t fwk_whiteLevel =
3197            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3198        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3199                sizeof(WHITE_BALANCE_MODES_MAP),
3200                fwk_whiteLevel);
3201        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3202                sizeof(whiteLevel), &whiteLevel);
3203    }
3204
3205    float focalDistance = -1.0;
3206    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3207        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3208        rc = AddSetParmEntryToBatch(mParameters,
3209                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3210                sizeof(focalDistance), &focalDistance);
3211    }
3212
3213    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3214        uint8_t fwk_focusMode =
3215            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3216        uint8_t focusMode;
3217        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3218            focusMode = CAM_FOCUS_MODE_INFINITY;
3219        } else{
3220         focusMode = lookupHalName(FOCUS_MODES_MAP,
3221                                   sizeof(FOCUS_MODES_MAP),
3222                                   fwk_focusMode);
3223        }
3224        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3225                sizeof(focusMode), &focusMode);
3226    }
3227
3228    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3229        int32_t antibandingMode =
3230            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3231        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3232                sizeof(antibandingMode), &antibandingMode);
3233    }
3234
3235    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3236        int32_t expCompensation = frame_settings.find(
3237            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3238        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3239            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3240        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3241            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3242        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3243          sizeof(expCompensation), &expCompensation);
3244    }
3245
3246    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3247        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3248        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3249                sizeof(aeLock), &aeLock);
3250    }
3251    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3252        cam_fps_range_t fps_range;
3253        fps_range.min_fps =
3254            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3255        fps_range.max_fps =
3256            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3257        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3258                sizeof(fps_range), &fps_range);
3259    }
3260
3261    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3262        uint8_t awbLock =
3263            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3264        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3265                sizeof(awbLock), &awbLock);
3266    }
3267
3268    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3269        uint8_t fwk_effectMode =
3270            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3271        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3272                sizeof(EFFECT_MODES_MAP),
3273                fwk_effectMode);
3274        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3275                sizeof(effectMode), &effectMode);
3276    }
3277
3278    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3279        uint8_t colorCorrectMode =
3280            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3281        rc =
3282            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3283                    sizeof(colorCorrectMode), &colorCorrectMode);
3284    }
3285
3286    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3287        cam_color_correct_gains_t colorCorrectGains;
3288        for (int i = 0; i < 4; i++) {
3289            colorCorrectGains.gains[i] =
3290                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3291        }
3292        rc =
3293            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3294                    sizeof(colorCorrectGains), &colorCorrectGains);
3295    }
3296
3297    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3298        cam_color_correct_matrix_t colorCorrectTransform;
3299        cam_rational_type_t transform_elem;
3300        int num = 0;
3301        for (int i = 0; i < 3; i++) {
3302           for (int j = 0; j < 3; j++) {
3303              transform_elem.numerator =
3304                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3305              transform_elem.denominator =
3306                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3307              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3308              num++;
3309           }
3310        }
3311        rc =
3312            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3313                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3314    }
3315
3316    cam_trigger_t aecTrigger;
3317    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3318    aecTrigger.trigger_id = -1;
3319    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3320        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3321        aecTrigger.trigger =
3322            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3323        aecTrigger.trigger_id =
3324            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3325    }
3326    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3327                                sizeof(aecTrigger), &aecTrigger);
3328
3329    /*af_trigger must come with a trigger id*/
3330    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3331        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3332        cam_trigger_t af_trigger;
3333        af_trigger.trigger =
3334            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3335        af_trigger.trigger_id =
3336            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3337        rc = AddSetParmEntryToBatch(mParameters,
3338                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3339    }
3340
3341    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3342        int32_t demosaic =
3343            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3344        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3345                sizeof(demosaic), &demosaic);
3346    }
3347
3348    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3349        cam_edge_application_t edge_application;
3350        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3351        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3352            edge_application.sharpness = 0;
3353        } else {
3354            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3355                int32_t edgeStrength =
3356                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3357                edge_application.sharpness = edgeStrength;
3358            } else {
3359                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3360            }
3361        }
3362        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3363                sizeof(edge_application), &edge_application);
3364    }
3365
3366    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3367        int32_t respectFlashMode = 1;
3368        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3369            uint8_t fwk_aeMode =
3370                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3371            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3372                respectFlashMode = 0;
3373                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3374                    __func__);
3375            }
3376        }
3377        if (respectFlashMode) {
3378            uint8_t flashMode =
3379                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3380            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3381                                          sizeof(FLASH_MODES_MAP),
3382                                          flashMode);
3383            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3384            // To check: CAM_INTF_META_FLASH_MODE usage
3385            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3386                          sizeof(flashMode), &flashMode);
3387        }
3388    }
3389
3390    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3391        uint8_t flashPower =
3392            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3393        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3394                sizeof(flashPower), &flashPower);
3395    }
3396
3397    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3398        int64_t flashFiringTime =
3399            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3400        rc = AddSetParmEntryToBatch(mParameters,
3401                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3402    }
3403
3404    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3405        uint8_t geometricMode =
3406            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3407        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3408                sizeof(geometricMode), &geometricMode);
3409    }
3410
3411    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3412        uint8_t geometricStrength =
3413            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3414        rc = AddSetParmEntryToBatch(mParameters,
3415                CAM_INTF_META_GEOMETRIC_STRENGTH,
3416                sizeof(geometricStrength), &geometricStrength);
3417    }
3418
3419    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3420        uint8_t hotPixelMode =
3421            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3422        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3423                sizeof(hotPixelMode), &hotPixelMode);
3424    }
3425
3426    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3427        float lensAperture =
3428            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3429        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3430                sizeof(lensAperture), &lensAperture);
3431    }
3432
3433    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3434        float filterDensity =
3435            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3436        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3437                sizeof(filterDensity), &filterDensity);
3438    }
3439
3440    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3441        float focalLength =
3442            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3443        rc = AddSetParmEntryToBatch(mParameters,
3444                CAM_INTF_META_LENS_FOCAL_LENGTH,
3445                sizeof(focalLength), &focalLength);
3446    }
3447
3448    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3449        uint8_t optStabMode =
3450            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3451        rc = AddSetParmEntryToBatch(mParameters,
3452                CAM_INTF_META_LENS_OPT_STAB_MODE,
3453                sizeof(optStabMode), &optStabMode);
3454    }
3455
3456    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3457        uint8_t noiseRedMode =
3458            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3459        rc = AddSetParmEntryToBatch(mParameters,
3460                CAM_INTF_META_NOISE_REDUCTION_MODE,
3461                sizeof(noiseRedMode), &noiseRedMode);
3462    }
3463
3464    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3465        uint8_t noiseRedStrength =
3466            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3467        rc = AddSetParmEntryToBatch(mParameters,
3468                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3469                sizeof(noiseRedStrength), &noiseRedStrength);
3470    }
3471
3472    cam_crop_region_t scalerCropRegion;
3473    bool scalerCropSet = false;
3474    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3475        scalerCropRegion.left =
3476            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3477        scalerCropRegion.top =
3478            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3479        scalerCropRegion.width =
3480            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3481        scalerCropRegion.height =
3482            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3483        rc = AddSetParmEntryToBatch(mParameters,
3484                CAM_INTF_META_SCALER_CROP_REGION,
3485                sizeof(scalerCropRegion), &scalerCropRegion);
3486        scalerCropSet = true;
3487    }
3488
3489    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3490        int64_t sensorExpTime =
3491            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3492        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3493        rc = AddSetParmEntryToBatch(mParameters,
3494                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3495                sizeof(sensorExpTime), &sensorExpTime);
3496    }
3497
3498    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3499        int64_t sensorFrameDuration =
3500            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3501        int64_t minFrameDuration = getMinFrameDuration(request);
3502        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3503        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3504            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3505        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3506        rc = AddSetParmEntryToBatch(mParameters,
3507                CAM_INTF_META_SENSOR_FRAME_DURATION,
3508                sizeof(sensorFrameDuration), &sensorFrameDuration);
3509    }
3510
3511    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3512        int32_t sensorSensitivity =
3513            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3514        if (sensorSensitivity <
3515                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3516            sensorSensitivity =
3517                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3518        if (sensorSensitivity >
3519                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3520            sensorSensitivity =
3521                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3522        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3523        rc = AddSetParmEntryToBatch(mParameters,
3524                CAM_INTF_META_SENSOR_SENSITIVITY,
3525                sizeof(sensorSensitivity), &sensorSensitivity);
3526    }
3527
3528    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3529        int32_t shadingMode =
3530            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3531        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3532                sizeof(shadingMode), &shadingMode);
3533    }
3534
3535    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3536        uint8_t shadingStrength =
3537            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3538        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3539                sizeof(shadingStrength), &shadingStrength);
3540    }
3541
3542    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3543        uint8_t fwk_facedetectMode =
3544            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3545        uint8_t facedetectMode =
3546            lookupHalName(FACEDETECT_MODES_MAP,
3547                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3548        rc = AddSetParmEntryToBatch(mParameters,
3549                CAM_INTF_META_STATS_FACEDETECT_MODE,
3550                sizeof(facedetectMode), &facedetectMode);
3551    }
3552
3553    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3554        uint8_t histogramMode =
3555            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3556        rc = AddSetParmEntryToBatch(mParameters,
3557                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3558                sizeof(histogramMode), &histogramMode);
3559    }
3560
3561    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3562        uint8_t sharpnessMapMode =
3563            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3564        rc = AddSetParmEntryToBatch(mParameters,
3565                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3566                sizeof(sharpnessMapMode), &sharpnessMapMode);
3567    }
3568
3569    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3570        uint8_t tonemapMode =
3571            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3572        rc = AddSetParmEntryToBatch(mParameters,
3573                CAM_INTF_META_TONEMAP_MODE,
3574                sizeof(tonemapMode), &tonemapMode);
3575    }
3576    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3577    /*All tonemap channels will have the same number of points*/
3578    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3579        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3580        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3581        cam_rgb_tonemap_curves tonemapCurves;
3582        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3583
3584        /* ch0 = G*/
3585        int point = 0;
3586        cam_tonemap_curve_t tonemapCurveGreen;
3587        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3588            for (int j = 0; j < 2; j++) {
3589               tonemapCurveGreen.tonemap_points[i][j] =
3590                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3591               point++;
3592            }
3593        }
3594        tonemapCurves.curves[0] = tonemapCurveGreen;
3595
3596        /* ch 1 = B */
3597        point = 0;
3598        cam_tonemap_curve_t tonemapCurveBlue;
3599        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3600            for (int j = 0; j < 2; j++) {
3601               tonemapCurveBlue.tonemap_points[i][j] =
3602                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3603               point++;
3604            }
3605        }
3606        tonemapCurves.curves[1] = tonemapCurveBlue;
3607
3608        /* ch 2 = R */
3609        point = 0;
3610        cam_tonemap_curve_t tonemapCurveRed;
3611        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3612            for (int j = 0; j < 2; j++) {
3613               tonemapCurveRed.tonemap_points[i][j] =
3614                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3615               point++;
3616            }
3617        }
3618        tonemapCurves.curves[2] = tonemapCurveRed;
3619
3620        rc = AddSetParmEntryToBatch(mParameters,
3621                CAM_INTF_META_TONEMAP_CURVES,
3622                sizeof(tonemapCurves), &tonemapCurves);
3623    }
3624
3625    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3626        uint8_t captureIntent =
3627            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3628        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3629                sizeof(captureIntent), &captureIntent);
3630    }
3631
3632    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3633        uint8_t blackLevelLock =
3634            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3635        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3636                sizeof(blackLevelLock), &blackLevelLock);
3637    }
3638
3639    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3640        uint8_t lensShadingMapMode =
3641            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3642        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3643                sizeof(lensShadingMapMode), &lensShadingMapMode);
3644    }
3645
3646    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3647        cam_area_t roi;
3648        bool reset = true;
3649        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3650        if (scalerCropSet) {
3651            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3652        }
3653        if (reset) {
3654            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3655                    sizeof(roi), &roi);
3656        }
3657    }
3658
3659    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3660        cam_area_t roi;
3661        bool reset = true;
3662        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3663        if (scalerCropSet) {
3664            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3665        }
3666        if (reset) {
3667            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3668                    sizeof(roi), &roi);
3669        }
3670    }
3671
3672    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3673        cam_area_t roi;
3674        bool reset = true;
3675        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3676        if (scalerCropSet) {
3677            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3678        }
3679        if (reset) {
3680            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3681                    sizeof(roi), &roi);
3682        }
3683    }
3684    return rc;
3685}
3686
3687/*===========================================================================
3688 * FUNCTION   : getJpegSettings
3689 *
3690 * DESCRIPTION: save the jpeg settings in the HAL
3691 *
3692 *
3693 * PARAMETERS :
3694 *   @settings  : frame settings information from framework
3695 *
3696 *
3697 * RETURN     : success: NO_ERROR
3698 *              failure:
3699 *==========================================================================*/
3700int QCamera3HardwareInterface::getJpegSettings
3701                                  (const camera_metadata_t *settings)
3702{
3703    if (mJpegSettings) {
3704        if (mJpegSettings->gps_timestamp) {
3705            free(mJpegSettings->gps_timestamp);
3706            mJpegSettings->gps_timestamp = NULL;
3707        }
3708        if (mJpegSettings->gps_coordinates) {
3709            for (int i = 0; i < 3; i++) {
3710                free(mJpegSettings->gps_coordinates[i]);
3711                mJpegSettings->gps_coordinates[i] = NULL;
3712            }
3713        }
3714        free(mJpegSettings);
3715        mJpegSettings = NULL;
3716    }
3717    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3718    CameraMetadata jpeg_settings;
3719    jpeg_settings = settings;
3720
3721    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3722        mJpegSettings->jpeg_orientation =
3723            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3724    } else {
3725        mJpegSettings->jpeg_orientation = 0;
3726    }
3727    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3728        mJpegSettings->jpeg_quality =
3729            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3730    } else {
3731        mJpegSettings->jpeg_quality = 85;
3732    }
3733    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3734        mJpegSettings->thumbnail_size.width =
3735            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3736        mJpegSettings->thumbnail_size.height =
3737            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3738    } else {
3739        mJpegSettings->thumbnail_size.width = 0;
3740        mJpegSettings->thumbnail_size.height = 0;
3741    }
3742    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3743        for (int i = 0; i < 3; i++) {
3744            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3745            *(mJpegSettings->gps_coordinates[i]) =
3746                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3747        }
3748    } else{
3749       for (int i = 0; i < 3; i++) {
3750            mJpegSettings->gps_coordinates[i] = NULL;
3751        }
3752    }
3753
3754    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3755        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3756        *(mJpegSettings->gps_timestamp) =
3757            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3758    } else {
3759        mJpegSettings->gps_timestamp = NULL;
3760    }
3761
3762    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3763        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3764        for (int i = 0; i < len; i++) {
3765            mJpegSettings->gps_processing_method[i] =
3766                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3767        }
3768        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3769            mJpegSettings->gps_processing_method[len] = '\0';
3770        }
3771    } else {
3772        mJpegSettings->gps_processing_method[0] = '\0';
3773    }
3774
3775    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3776        mJpegSettings->sensor_sensitivity =
3777            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3778    } else {
3779        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3780    }
3781
3782    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3783
3784    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3785        mJpegSettings->lens_focal_length =
3786            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3787    }
3788    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3789        mJpegSettings->exposure_compensation =
3790            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3791    }
3792    mJpegSettings->sharpness = 10; //default value
3793    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3794        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3795        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3796            mJpegSettings->sharpness = 0;
3797        }
3798    }
3799    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3800    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3801    mJpegSettings->is_jpeg_format = true;
3802    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3803    return 0;
3804}
3805
3806/*===========================================================================
3807 * FUNCTION   : captureResultCb
3808 *
3809 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3810 *
3811 * PARAMETERS :
3812 *   @frame  : frame information from mm-camera-interface
3813 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3814 *   @userdata: userdata
3815 *
3816 * RETURN     : NONE
3817 *==========================================================================*/
3818void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3819                camera3_stream_buffer_t *buffer,
3820                uint32_t frame_number, void *userdata)
3821{
3822    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3823    if (hw == NULL) {
3824        ALOGE("%s: Invalid hw %p", __func__, hw);
3825        return;
3826    }
3827
3828    hw->captureResultCb(metadata, buffer, frame_number);
3829    return;
3830}
3831
3832
3833/*===========================================================================
3834 * FUNCTION   : initialize
3835 *
3836 * DESCRIPTION: Pass framework callback pointers to HAL
3837 *
3838 * PARAMETERS :
3839 *
3840 *
3841 * RETURN     : Success : 0
3842 *              Failure: -ENODEV
3843 *==========================================================================*/
3844
3845int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3846                                  const camera3_callback_ops_t *callback_ops)
3847{
3848    ALOGV("%s: E", __func__);
3849    QCamera3HardwareInterface *hw =
3850        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3851    if (!hw) {
3852        ALOGE("%s: NULL camera device", __func__);
3853        return -ENODEV;
3854    }
3855
3856    int rc = hw->initialize(callback_ops);
3857    ALOGV("%s: X", __func__);
3858    return rc;
3859}
3860
3861/*===========================================================================
3862 * FUNCTION   : configure_streams
3863 *
3864 * DESCRIPTION:
3865 *
3866 * PARAMETERS :
3867 *
3868 *
3869 * RETURN     : Success: 0
3870 *              Failure: -EINVAL (if stream configuration is invalid)
3871 *                       -ENODEV (fatal error)
3872 *==========================================================================*/
3873
3874int QCamera3HardwareInterface::configure_streams(
3875        const struct camera3_device *device,
3876        camera3_stream_configuration_t *stream_list)
3877{
3878    ALOGV("%s: E", __func__);
3879    QCamera3HardwareInterface *hw =
3880        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3881    if (!hw) {
3882        ALOGE("%s: NULL camera device", __func__);
3883        return -ENODEV;
3884    }
3885    int rc = hw->configureStreams(stream_list);
3886    ALOGV("%s: X", __func__);
3887    return rc;
3888}
3889
3890/*===========================================================================
3891 * FUNCTION   : register_stream_buffers
3892 *
3893 * DESCRIPTION: Register stream buffers with the device
3894 *
3895 * PARAMETERS :
3896 *
3897 * RETURN     :
3898 *==========================================================================*/
3899int QCamera3HardwareInterface::register_stream_buffers(
3900        const struct camera3_device *device,
3901        const camera3_stream_buffer_set_t *buffer_set)
3902{
3903    ALOGV("%s: E", __func__);
3904    QCamera3HardwareInterface *hw =
3905        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3906    if (!hw) {
3907        ALOGE("%s: NULL camera device", __func__);
3908        return -ENODEV;
3909    }
3910    int rc = hw->registerStreamBuffers(buffer_set);
3911    ALOGV("%s: X", __func__);
3912    return rc;
3913}
3914
3915/*===========================================================================
3916 * FUNCTION   : construct_default_request_settings
3917 *
3918 * DESCRIPTION: Configure a settings buffer to meet the required use case
3919 *
3920 * PARAMETERS :
3921 *
3922 *
3923 * RETURN     : Success: Return valid metadata
3924 *              Failure: Return NULL
3925 *==========================================================================*/
3926const camera_metadata_t* QCamera3HardwareInterface::
3927    construct_default_request_settings(const struct camera3_device *device,
3928                                        int type)
3929{
3930
3931    ALOGV("%s: E", __func__);
3932    camera_metadata_t* fwk_metadata = NULL;
3933    QCamera3HardwareInterface *hw =
3934        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3935    if (!hw) {
3936        ALOGE("%s: NULL camera device", __func__);
3937        return NULL;
3938    }
3939
3940    fwk_metadata = hw->translateCapabilityToMetadata(type);
3941
3942    ALOGV("%s: X", __func__);
3943    return fwk_metadata;
3944}
3945
3946/*===========================================================================
3947 * FUNCTION   : process_capture_request
3948 *
3949 * DESCRIPTION:
3950 *
3951 * PARAMETERS :
3952 *
3953 *
3954 * RETURN     :
3955 *==========================================================================*/
3956int QCamera3HardwareInterface::process_capture_request(
3957                    const struct camera3_device *device,
3958                    camera3_capture_request_t *request)
3959{
3960    ALOGV("%s: E", __func__);
3961    QCamera3HardwareInterface *hw =
3962        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3963    if (!hw) {
3964        ALOGE("%s: NULL camera device", __func__);
3965        return -EINVAL;
3966    }
3967
3968    int rc = hw->processCaptureRequest(request);
3969    ALOGV("%s: X", __func__);
3970    return rc;
3971}
3972
3973/*===========================================================================
3974 * FUNCTION   : get_metadata_vendor_tag_ops
3975 *
3976 * DESCRIPTION:
3977 *
3978 * PARAMETERS :
3979 *
3980 *
3981 * RETURN     :
3982 *==========================================================================*/
3983
3984void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3985                const struct camera3_device *device,
3986                vendor_tag_query_ops_t* ops)
3987{
3988    ALOGV("%s: E", __func__);
3989    QCamera3HardwareInterface *hw =
3990        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3991    if (!hw) {
3992        ALOGE("%s: NULL camera device", __func__);
3993        return;
3994    }
3995
3996    hw->getMetadataVendorTagOps(ops);
3997    ALOGV("%s: X", __func__);
3998    return;
3999}
4000
4001/*===========================================================================
4002 * FUNCTION   : dump
4003 *
4004 * DESCRIPTION:
4005 *
4006 * PARAMETERS :
4007 *
4008 *
4009 * RETURN     :
4010 *==========================================================================*/
4011
4012void QCamera3HardwareInterface::dump(
4013                const struct camera3_device *device, int fd)
4014{
4015    ALOGV("%s: E", __func__);
4016    QCamera3HardwareInterface *hw =
4017        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4018    if (!hw) {
4019        ALOGE("%s: NULL camera device", __func__);
4020        return;
4021    }
4022
4023    hw->dump(fd);
4024    ALOGV("%s: X", __func__);
4025    return;
4026}
4027
4028/*===========================================================================
4029 * FUNCTION   : flush
4030 *
4031 * DESCRIPTION:
4032 *
4033 * PARAMETERS :
4034 *
4035 *
4036 * RETURN     :
4037 *==========================================================================*/
4038
4039int QCamera3HardwareInterface::flush(
4040                const struct camera3_device *device)
4041{
4042    int rc;
4043    ALOGV("%s: E", __func__);
4044    QCamera3HardwareInterface *hw =
4045        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4046    if (!hw) {
4047        ALOGE("%s: NULL camera device", __func__);
4048        return -EINVAL;
4049    }
4050
4051    rc = hw->flush();
4052    ALOGV("%s: X", __func__);
4053    return rc;
4054}
4055
4056/*===========================================================================
4057 * FUNCTION   : close_camera_device
4058 *
4059 * DESCRIPTION:
4060 *
4061 * PARAMETERS :
4062 *
4063 *
4064 * RETURN     :
4065 *==========================================================================*/
4066int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4067{
4068    ALOGV("%s: E", __func__);
4069    int ret = NO_ERROR;
4070    QCamera3HardwareInterface *hw =
4071        reinterpret_cast<QCamera3HardwareInterface *>(
4072            reinterpret_cast<camera3_device_t *>(device)->priv);
4073    if (!hw) {
4074        ALOGE("NULL camera device");
4075        return BAD_VALUE;
4076    }
4077    delete hw;
4078
4079    pthread_mutex_lock(&mCameraSessionLock);
4080    mCameraSessionActive = 0;
4081    pthread_mutex_unlock(&mCameraSessionLock);
4082    ALOGV("%s: X", __func__);
4083    return ret;
4084}
4085
4086/*===========================================================================
4087 * FUNCTION   : getWaveletDenoiseProcessPlate
4088 *
4089 * DESCRIPTION: query wavelet denoise process plate
4090 *
4091 * PARAMETERS : None
4092 *
4093 * RETURN     : WNR prcocess plate vlaue
4094 *==========================================================================*/
4095cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4096{
4097    char prop[PROPERTY_VALUE_MAX];
4098    memset(prop, 0, sizeof(prop));
4099    property_get("persist.denoise.process.plates", prop, "0");
4100    int processPlate = atoi(prop);
4101    switch(processPlate) {
4102    case 0:
4103        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4104    case 1:
4105        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4106    case 2:
4107        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4108    case 3:
4109        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4110    default:
4111        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4112    }
4113}
4114
4115/*===========================================================================
4116 * FUNCTION   : needRotationReprocess
4117 *
4118 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4119 *
4120 * PARAMETERS : none
4121 *
4122 * RETURN     : true: needed
4123 *              false: no need
4124 *==========================================================================*/
4125bool QCamera3HardwareInterface::needRotationReprocess()
4126{
4127
4128    if (!mJpegSettings->is_jpeg_format) {
4129        // RAW image, no need to reprocess
4130        return false;
4131    }
4132
4133    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4134        mJpegSettings->jpeg_orientation > 0) {
4135        // current rotation is not zero, and pp has the capability to process rotation
4136        ALOGD("%s: need do reprocess for rotation", __func__);
4137        return true;
4138    }
4139
4140    return false;
4141}
4142
4143/*===========================================================================
4144 * FUNCTION   : needReprocess
4145 *
4146 * DESCRIPTION: if reprocess in needed
4147 *
4148 * PARAMETERS : none
4149 *
4150 * RETURN     : true: needed
4151 *              false: no need
4152 *==========================================================================*/
4153bool QCamera3HardwareInterface::needReprocess()
4154{
4155    if (!mJpegSettings->is_jpeg_format) {
4156        // RAW image, no need to reprocess
4157        return false;
4158    }
4159
4160    if ((mJpegSettings->min_required_pp_mask > 0) ||
4161         isWNREnabled()) {
4162        // TODO: add for ZSL HDR later
4163        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4164        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4165        return true;
4166    }
4167    return needRotationReprocess();
4168}
4169
4170/*===========================================================================
4171 * FUNCTION   : addOnlineReprocChannel
4172 *
4173 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4174 *              coming from input channel
4175 *
4176 * PARAMETERS :
4177 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4178 *
4179 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4180 *==========================================================================*/
4181QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4182              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4183{
4184    int32_t rc = NO_ERROR;
4185    QCamera3ReprocessChannel *pChannel = NULL;
4186    if (pInputChannel == NULL) {
4187        ALOGE("%s: input channel obj is NULL", __func__);
4188        return NULL;
4189    }
4190
4191    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4192            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4193    if (NULL == pChannel) {
4194        ALOGE("%s: no mem for reprocess channel", __func__);
4195        return NULL;
4196    }
4197
4198    // Capture channel, only need snapshot and postview streams start together
4199    mm_camera_channel_attr_t attr;
4200    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4201    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4202    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4203    rc = pChannel->initialize();
4204    if (rc != NO_ERROR) {
4205        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4206        delete pChannel;
4207        return NULL;
4208    }
4209
4210    // pp feature config
4211    cam_pp_feature_config_t pp_config;
4212    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4213    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4214        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4215        pp_config.sharpness = mJpegSettings->sharpness;
4216    }
4217
4218    if (isWNREnabled()) {
4219        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4220        pp_config.denoise2d.denoise_enable = 1;
4221        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4222    }
4223    if (needRotationReprocess()) {
4224        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4225        int rotation = mJpegSettings->jpeg_orientation;
4226        if (rotation == 0) {
4227            pp_config.rotation = ROTATE_0;
4228        } else if (rotation == 90) {
4229            pp_config.rotation = ROTATE_90;
4230        } else if (rotation == 180) {
4231            pp_config.rotation = ROTATE_180;
4232        } else if (rotation == 270) {
4233            pp_config.rotation = ROTATE_270;
4234        }
4235    }
4236
4237   rc = pChannel->addReprocStreamsFromSource(pp_config,
4238                                             pInputChannel,
4239                                             mMetadataChannel);
4240
4241    if (rc != NO_ERROR) {
4242        delete pChannel;
4243        return NULL;
4244    }
4245    return pChannel;
4246}
4247
// Returns the minimum number of post-processing buffers advertised by the
// sensor capability table for this camera; used as the channel's maximum
// number of unmatched frames allowed in the queue.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4252
// Returns true when the capability table reports wavelet noise reduction
// (WNR) support for this camera.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4256
4257}; //end namespace qcamera
4258