QCamera3HWI.cpp revision 1fca7d2974d2adb729b13ad51600e0b8d8478e48
1/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
#define MAX(a, b) ((a) > (b) ? (a) : (b))

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables; entries are expected to be populated before a
// QCamera3HardwareInterface is constructed (the ctor dereferences them).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes open/close across all HAL instances; mCameraSessionActive
// enforces the single-active-session restriction (see openCamera()).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Framework ANDROID_CONTROL_EFFECT_MODE_* values -> backend CAM_EFFECT_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
72
// Framework ANDROID_CONTROL_AWB_MODE_* values -> backend CAM_WB_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
84
// Framework ANDROID_CONTROL_SCENE_MODE_* values -> backend CAM_SCENE_MODE_*.
// Note: STEADYPHOTO is mapped to the backend's ANTISHAKE scene.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
102
// Framework ANDROID_CONTROL_AF_MODE_* values -> backend CAM_FOCUS_MODE_*.
// AF_MODE_OFF is represented as the backend's FIXED focus mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
111
// Framework AE antibanding modes -> backend CAM_ANTIBANDING_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
118
// AE mode -> implied flash mode. Both AE_MODE_OFF and plain AE_MODE_ON keep
// the flash off; the REDEYE variant is treated as plain auto flash here.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
126
// Framework ANDROID_FLASH_MODE_* values -> backend CAM_FLASH_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
132
// Face-detect modes; only OFF and FULL are supported (no SIMPLE entry).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
137
// Flat list of supported JPEG thumbnail sizes as (width, height) pairs;
// the trailing (0, 0) entry advertises "no thumbnail".
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
// camera3 HAL ops vtable handed to the framework via camera3_device_t::ops.
// Each entry is a static trampoline that recovers `this` from device->priv.
// (GNU designated-initializer "label:" syntax, kept for field-order safety.)
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Wire up the camera3_device_t handed back to the framework; priv lets
    // the static ops trampolines recover this instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): assumes gCamCapability[cameraId] is already populated by
    // the module before construction — verify against the HAL module init.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; clear the cache.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power module is optional; failure to find it is logged, not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230
231    // NOTE: 'camera3_stream_t *' objects are already freed at
232    //        this stage by the framework
233    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
234        it != mStreamInfo.end(); it++) {
235        QCamera3Channel *channel = (*it)->channel;
236        if (channel) {
237            channel->stop();
238        }
239    }
240
241    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
242        it != mStreamInfo.end(); it++) {
243        QCamera3Channel *channel = (*it)->channel;
244        if ((*it)->registered && (*it)->buffer_set.buffers) {
245             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
246        }
247        if (channel)
248            delete channel;
249        free (*it);
250    }
251
252    mPictureChannel = NULL;
253
254    if (mJpegSettings != NULL) {
255        free(mJpegSettings);
256        mJpegSettings = NULL;
257    }
258
259    /* Clean up all channels */
260    if (mCameraInitialized) {
261        if (mMetadataChannel) {
262            mMetadataChannel->stop();
263            delete mMetadataChannel;
264            mMetadataChannel = NULL;
265        }
266        deinitParameters();
267    }
268
269    if (mCameraOpened)
270        closeCamera();
271
272    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
273        if (mDefaultMetadata[i])
274            free_camera_metadata(mDefaultMetadata[i]);
275
276    pthread_cond_destroy(&mRequestCond);
277
278    pthread_mutex_destroy(&mMutex);
279    ALOGV("%s: X", __func__);
280}
281
282/*===========================================================================
283 * FUNCTION   : openCamera
284 *
285 * DESCRIPTION: open camera
286 *
287 * PARAMETERS :
288 *   @hw_device  : double ptr for camera device struct
289 *
290 * RETURN     : int32_t type of status
291 *              NO_ERROR  -- success
292 *              none-zero failure code
293 *==========================================================================*/
294int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
295{
296    int rc = 0;
297    pthread_mutex_lock(&mCameraSessionLock);
298    if (mCameraSessionActive) {
299        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
300        pthread_mutex_unlock(&mCameraSessionLock);
301        return -EDQUOT;
302    }
303
304    if (mCameraOpened) {
305        *hw_device = NULL;
306        return PERMISSION_DENIED;
307    }
308
309    rc = openCamera();
310    if (rc == 0) {
311        *hw_device = &mCameraDevice.common;
312        mCameraSessionActive = 1;
313    } else
314        *hw_device = NULL;
315
316#ifdef HAS_MULTIMEDIA_HINTS
317    if (rc == 0) {
318        if (m_pPowerModule) {
319            if (m_pPowerModule->powerHint) {
320                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
321                        (void *)"state=1");
322            }
323        }
324    }
325#endif
326    pthread_mutex_unlock(&mCameraSessionLock);
327    return rc;
328}
329
330/*===========================================================================
331 * FUNCTION   : openCamera
332 *
333 * DESCRIPTION: open camera
334 *
335 * PARAMETERS : none
336 *
337 * RETURN     : int32_t type of status
338 *              NO_ERROR  -- success
339 *              none-zero failure code
340 *==========================================================================*/
341int QCamera3HardwareInterface::openCamera()
342{
343    if (mCameraHandle) {
344        ALOGE("Failure: Camera already opened");
345        return ALREADY_EXISTS;
346    }
347    mCameraHandle = camera_open(mCameraId);
348    if (!mCameraHandle) {
349        ALOGE("camera_open failed.");
350        return UNKNOWN_ERROR;
351    }
352
353    mCameraOpened = true;
354
355    return NO_ERROR;
356}
357
358/*===========================================================================
359 * FUNCTION   : closeCamera
360 *
361 * DESCRIPTION: close camera
362 *
363 * PARAMETERS : none
364 *
365 * RETURN     : int32_t type of status
366 *              NO_ERROR  -- success
367 *              none-zero failure code
368 *==========================================================================*/
369int QCamera3HardwareInterface::closeCamera()
370{
371    int rc = NO_ERROR;
372
373    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
374    mCameraHandle = NULL;
375    mCameraOpened = false;
376
377#ifdef HAS_MULTIMEDIA_HINTS
378    if (rc == NO_ERROR) {
379        if (m_pPowerModule) {
380            if (m_pPowerModule->powerHint) {
381                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
382                        (void *)"state=0");
383            }
384        }
385    }
386#endif
387
388    return rc;
389}
390
391/*===========================================================================
392 * FUNCTION   : initialize
393 *
394 * DESCRIPTION: Initialize frameworks callback functions
395 *
396 * PARAMETERS :
397 *   @callback_ops : callback function to frameworks
398 *
399 * RETURN     :
400 *
401 *==========================================================================*/
402int QCamera3HardwareInterface::initialize(
403        const struct camera3_callback_ops *callback_ops)
404{
405    int rc;
406
407    pthread_mutex_lock(&mMutex);
408
409    rc = initParameters();
410    if (rc < 0) {
411        ALOGE("%s: initParamters failed %d", __func__, rc);
412       goto err1;
413    }
414    mCallbackOps = callback_ops;
415
416    pthread_mutex_unlock(&mMutex);
417    mCameraInitialized = true;
418    return 0;
419
420err1:
421    pthread_mutex_unlock(&mMutex);
422    return rc;
423}
424
425/*===========================================================================
426 * FUNCTION   : configureStreams
427 *
428 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
429 *              and output streams.
430 *
431 * PARAMETERS :
432 *   @stream_list : streams to be configured
433 *
434 * RETURN     :
435 *
436 *==========================================================================*/
437int QCamera3HardwareInterface::configureStreams(
438        camera3_stream_configuration_t *streamList)
439{
440    int rc = 0;
441    mIsZslMode = false;
442
443    // Sanity check stream_list
444    if (streamList == NULL) {
445        ALOGE("%s: NULL stream configuration", __func__);
446        return BAD_VALUE;
447    }
448    if (streamList->streams == NULL) {
449        ALOGE("%s: NULL stream list", __func__);
450        return BAD_VALUE;
451    }
452
453    if (streamList->num_streams < 1) {
454        ALOGE("%s: Bad number of streams requested: %d", __func__,
455                streamList->num_streams);
456        return BAD_VALUE;
457    }
458
459    /* first invalidate all the steams in the mStreamList
460     * if they appear again, they will be validated */
461    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
462            it != mStreamInfo.end(); it++) {
463        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
464        channel->stop();
465        (*it)->status = INVALID;
466    }
467    if (mMetadataChannel) {
468        /* If content of mStreamInfo is not 0, there is metadata stream */
469        mMetadataChannel->stop();
470    }
471
472    pthread_mutex_lock(&mMutex);
473
474    camera3_stream_t *inputStream = NULL;
475    camera3_stream_t *jpegStream = NULL;
476    cam_stream_size_info_t stream_config_info;
477
478    for (size_t i = 0; i < streamList->num_streams; i++) {
479        camera3_stream_t *newStream = streamList->streams[i];
480        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
481                __func__, newStream->stream_type, newStream->format,
482                 newStream->width, newStream->height);
483        //if the stream is in the mStreamList validate it
484        bool stream_exists = false;
485        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
486                it != mStreamInfo.end(); it++) {
487            if ((*it)->stream == newStream) {
488                QCamera3Channel *channel =
489                    (QCamera3Channel*)(*it)->stream->priv;
490                stream_exists = true;
491                (*it)->status = RECONFIGURE;
492                /*delete the channel object associated with the stream because
493                  we need to reconfigure*/
494                delete channel;
495                (*it)->stream->priv = NULL;
496                (*it)->channel = NULL;
497            }
498        }
499        if (!stream_exists) {
500            //new stream
501            stream_info_t* stream_info;
502            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
503            stream_info->stream = newStream;
504            stream_info->status = VALID;
505            stream_info->registered = 0;
506            stream_info->channel = NULL;
507            mStreamInfo.push_back(stream_info);
508        }
509        if (newStream->stream_type == CAMERA3_STREAM_INPUT
510                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
511            if (inputStream != NULL) {
512                ALOGE("%s: Multiple input streams requested!", __func__);
513                pthread_mutex_unlock(&mMutex);
514                return BAD_VALUE;
515            }
516            inputStream = newStream;
517        }
518        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
519            jpegStream = newStream;
520        }
521    }
522    mInputStream = inputStream;
523
524    /*clean up invalid streams*/
525    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
526            it != mStreamInfo.end();) {
527        if(((*it)->status) == INVALID){
528            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
529            delete channel;
530            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
531            free(*it);
532            it = mStreamInfo.erase(it);
533        } else {
534            it++;
535        }
536    }
537    if (mMetadataChannel) {
538        delete mMetadataChannel;
539        mMetadataChannel = NULL;
540    }
541
542    //Create metadata channel and initialize it
543    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
544                    mCameraHandle->ops, captureResultCb,
545                    &gCamCapability[mCameraId]->padding_info, this);
546    if (mMetadataChannel == NULL) {
547        ALOGE("%s: failed to allocate metadata channel", __func__);
548        rc = -ENOMEM;
549        pthread_mutex_unlock(&mMutex);
550        return rc;
551    }
552    rc = mMetadataChannel->initialize();
553    if (rc < 0) {
554        ALOGE("%s: metadata channel initialization failed", __func__);
555        delete mMetadataChannel;
556        pthread_mutex_unlock(&mMutex);
557        return rc;
558    }
559
560    /* Allocate channel objects for the requested streams */
561    for (size_t i = 0; i < streamList->num_streams; i++) {
562        camera3_stream_t *newStream = streamList->streams[i];
563        uint32_t stream_usage = newStream->usage;
564        stream_config_info.stream_sizes[i].width = newStream->width;
565        stream_config_info.stream_sizes[i].height = newStream->height;
566        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
567            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
568            //for zsl stream the size is jpeg size
569            stream_config_info.stream_sizes[i].width = jpegStream->width;
570            stream_config_info.stream_sizes[i].height = jpegStream->height;
571            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
572        } else {
573           //for non zsl streams find out the format
574           switch (newStream->format) {
575           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
576              {
577                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
578                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
579                 } else {
580                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
581                 }
582              }
583              break;
584           case HAL_PIXEL_FORMAT_YCbCr_420_888:
585              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
586              break;
587           case HAL_PIXEL_FORMAT_BLOB:
588              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
589              break;
590           default:
591              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
592              break;
593           }
594        }
595        if (newStream->priv == NULL) {
596            //New stream, construct channel
597            switch (newStream->stream_type) {
598            case CAMERA3_STREAM_INPUT:
599                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
600                break;
601            case CAMERA3_STREAM_BIDIRECTIONAL:
602                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
603                    GRALLOC_USAGE_HW_CAMERA_WRITE;
604                break;
605            case CAMERA3_STREAM_OUTPUT:
606                /* For video encoding stream, set read/write rarely
607                 * flag so that they may be set to un-cached */
608                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
609                    newStream->usage =
610                         (GRALLOC_USAGE_SW_READ_RARELY |
611                         GRALLOC_USAGE_SW_WRITE_RARELY |
612                         GRALLOC_USAGE_HW_CAMERA_WRITE);
613                else
614                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
615                break;
616            default:
617                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
618                break;
619            }
620
621            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
622                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
623                QCamera3Channel *channel;
624                switch (newStream->format) {
625                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
626                case HAL_PIXEL_FORMAT_YCbCr_420_888:
627                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
628                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
629                        jpegStream) {
630                        uint32_t width = jpegStream->width;
631                        uint32_t height = jpegStream->height;
632                        mIsZslMode = true;
633                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
634                            mCameraHandle->ops, captureResultCb,
635                            &gCamCapability[mCameraId]->padding_info, this, newStream,
636                            width, height);
637                    } else
638                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
639                            mCameraHandle->ops, captureResultCb,
640                            &gCamCapability[mCameraId]->padding_info, this, newStream);
641                    if (channel == NULL) {
642                        ALOGE("%s: allocation of channel failed", __func__);
643                        pthread_mutex_unlock(&mMutex);
644                        return -ENOMEM;
645                    }
646
647                    newStream->priv = channel;
648                    break;
649                case HAL_PIXEL_FORMAT_BLOB:
650                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
651                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
652                            mCameraHandle->ops, captureResultCb,
653                            &gCamCapability[mCameraId]->padding_info, this, newStream);
654                    if (mPictureChannel == NULL) {
655                        ALOGE("%s: allocation of channel failed", __func__);
656                        pthread_mutex_unlock(&mMutex);
657                        return -ENOMEM;
658                    }
659                    newStream->priv = (QCamera3Channel*)mPictureChannel;
660                    break;
661
662                //TODO: Add support for app consumed format?
663                default:
664                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
665                    break;
666                }
667            }
668
669            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
670                    it != mStreamInfo.end(); it++) {
671                if ((*it)->stream == newStream) {
672                    (*it)->channel = (QCamera3Channel*) newStream->priv;
673                    break;
674                }
675            }
676        } else {
677            // Channel already exists for this stream
678            // Do nothing for now
679        }
680    }
681
682    int32_t hal_version = CAM_HAL_V3;
683    stream_config_info.num_streams = streamList->num_streams;
684
685    // settings/parameters don't carry over for new configureStreams
686    memset(mParameters, 0, sizeof(parm_buffer_t));
687
688    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
689    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
690                sizeof(hal_version), &hal_version);
691
692    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
693                sizeof(stream_config_info), &stream_config_info);
694
695    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
696
697    /*For the streams to be reconfigured we need to register the buffers
698      since the framework wont*/
699    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
700            it != mStreamInfo.end(); it++) {
701        if ((*it)->status == RECONFIGURE) {
702            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
703            /*only register buffers for streams that have already been
704              registered*/
705            if ((*it)->registered) {
706                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
707                        (*it)->buffer_set.buffers);
708                if (rc != NO_ERROR) {
709                    ALOGE("%s: Failed to register the buffers of old stream,\
710                            rc = %d", __func__, rc);
711                }
712                ALOGV("%s: channel %p has %d buffers",
713                        __func__, channel, (*it)->buffer_set.num_buffers);
714            }
715        }
716
717        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
718        if (index == NAME_NOT_FOUND) {
719            mPendingBuffersMap.add((*it)->stream, 0);
720        } else {
721            mPendingBuffersMap.editValueAt(index) = 0;
722        }
723    }
724
725    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
726    mPendingRequestsList.clear();
727
728    /*flush the metadata list*/
729    if (!mStoredMetadataList.empty()) {
730        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
731              m != mStoredMetadataList.end(); m++) {
732            mMetadataChannel->bufDone(m->meta_buf);
733            free(m->meta_buf);
734            m = mStoredMetadataList.erase(m);
735        }
736    }
737
738    mFirstRequest = true;
739
740    //Get min frame duration for this streams configuration
741    deriveMinFrameDuration();
742
743    pthread_mutex_unlock(&mMutex);
744    return rc;
745}
746
747/*===========================================================================
748 * FUNCTION   : validateCaptureRequest
749 *
750 * DESCRIPTION: validate a capture request from camera service
751 *
752 * PARAMETERS :
753 *   @request : request from framework to process
754 *
755 * RETURN     :
756 *
757 *==========================================================================*/
758int QCamera3HardwareInterface::validateCaptureRequest(
759                    camera3_capture_request_t *request)
760{
761    ssize_t idx = 0;
762    const camera3_stream_buffer_t *b;
763    CameraMetadata meta;
764
765    /* Sanity check the request */
766    if (request == NULL) {
767        ALOGE("%s: NULL capture request", __func__);
768        return BAD_VALUE;
769    }
770
771    uint32_t frameNumber = request->frame_number;
772    if (request->input_buffer != NULL &&
773            request->input_buffer->stream != mInputStream) {
774        ALOGE("%s: Request %d: Input buffer not from input stream!",
775                __FUNCTION__, frameNumber);
776        return BAD_VALUE;
777    }
778    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
779        ALOGE("%s: Request %d: No output buffers provided!",
780                __FUNCTION__, frameNumber);
781        return BAD_VALUE;
782    }
783    if (request->input_buffer != NULL) {
784        b = request->input_buffer;
785        QCamera3Channel *channel =
786            static_cast<QCamera3Channel*>(b->stream->priv);
787        if (channel == NULL) {
788            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
789                    __func__, frameNumber, idx);
790            return BAD_VALUE;
791        }
792        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
793            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
794                    __func__, frameNumber, idx);
795            return BAD_VALUE;
796        }
797        if (b->release_fence != -1) {
798            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
799                    __func__, frameNumber, idx);
800            return BAD_VALUE;
801        }
802        if (b->buffer == NULL) {
803            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
804                    __func__, frameNumber, idx);
805            return BAD_VALUE;
806        }
807    }
808
809    // Validate all buffers
810    b = request->output_buffers;
811    do {
812        QCamera3Channel *channel =
813                static_cast<QCamera3Channel*>(b->stream->priv);
814        if (channel == NULL) {
815            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
816                    __func__, frameNumber, idx);
817            return BAD_VALUE;
818        }
819        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
820            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
821                    __func__, frameNumber, idx);
822            return BAD_VALUE;
823        }
824        if (b->release_fence != -1) {
825            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
826                    __func__, frameNumber, idx);
827            return BAD_VALUE;
828        }
829        if (b->buffer == NULL) {
830            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
831                    __func__, frameNumber, idx);
832            return BAD_VALUE;
833        }
834        idx++;
835        b = request->output_buffers + idx;
836    } while (idx < (ssize_t)request->num_output_buffers);
837
838    return NO_ERROR;
839}
840
841/*===========================================================================
842 * FUNCTION   : deriveMinFrameDuration
843 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
845 *              on currently configured streams.
846 *
847 * PARAMETERS : NONE
848 *
849 * RETURN     : NONE
850 *
851 *==========================================================================*/
852void QCamera3HardwareInterface::deriveMinFrameDuration()
853{
854    int32_t maxJpegDimension, maxProcessedDimension;
855
856    maxJpegDimension = 0;
857    maxProcessedDimension = 0;
858
859    // Figure out maximum jpeg, processed, and raw dimensions
860    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
861        it != mStreamInfo.end(); it++) {
862
863        // Input stream doesn't have valid stream_type
864        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
865            continue;
866
867        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
868        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
869            if (dimension > maxJpegDimension)
870                maxJpegDimension = dimension;
871        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
872            if (dimension > maxProcessedDimension)
873                maxProcessedDimension = dimension;
874        }
875    }
876
877    //Assume all jpeg dimensions are in processed dimensions.
878    if (maxJpegDimension > maxProcessedDimension)
879        maxProcessedDimension = maxJpegDimension;
880
881    //Find minimum durations for processed, jpeg, and raw
882    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
883    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
884        if (maxProcessedDimension ==
885            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
886            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
887            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
888            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
889            break;
890        }
891    }
892}
893
894/*===========================================================================
895 * FUNCTION   : getMinFrameDuration
896 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
903 *
904 *==========================================================================*/
905int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
906{
907    bool hasJpegStream = false;
908    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
909        const camera3_stream_t *stream = request->output_buffers[i].stream;
910        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
911            hasJpegStream = true;
912    }
913
914    if (!hasJpegStream)
915        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
916    else
917        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
918}
919
920/*===========================================================================
921 * FUNCTION   : registerStreamBuffers
922 *
923 * DESCRIPTION: Register buffers for a given stream with the HAL device.
924 *
 * PARAMETERS :
 *   @buffer_set : stream and buffer handles to be registered
927 *
928 * RETURN     :
929 *
930 *==========================================================================*/
931int QCamera3HardwareInterface::registerStreamBuffers(
932        const camera3_stream_buffer_set_t *buffer_set)
933{
934    int rc = 0;
935
936    pthread_mutex_lock(&mMutex);
937
938    if (buffer_set == NULL) {
939        ALOGE("%s: Invalid buffer_set parameter.", __func__);
940        pthread_mutex_unlock(&mMutex);
941        return -EINVAL;
942    }
943    if (buffer_set->stream == NULL) {
944        ALOGE("%s: Invalid stream parameter.", __func__);
945        pthread_mutex_unlock(&mMutex);
946        return -EINVAL;
947    }
948    if (buffer_set->num_buffers < 1) {
949        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
950        pthread_mutex_unlock(&mMutex);
951        return -EINVAL;
952    }
953    if (buffer_set->buffers == NULL) {
954        ALOGE("%s: Invalid buffers parameter.", __func__);
955        pthread_mutex_unlock(&mMutex);
956        return -EINVAL;
957    }
958
959    camera3_stream_t *stream = buffer_set->stream;
960    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
961
962    //set the buffer_set in the mStreamInfo array
963    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
964            it != mStreamInfo.end(); it++) {
965        if ((*it)->stream == stream) {
966            uint32_t numBuffers = buffer_set->num_buffers;
967            (*it)->buffer_set.stream = buffer_set->stream;
968            (*it)->buffer_set.num_buffers = numBuffers;
969            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
970            if ((*it)->buffer_set.buffers == NULL) {
971                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
972                pthread_mutex_unlock(&mMutex);
973                return -ENOMEM;
974            }
975            for (size_t j = 0; j < numBuffers; j++){
976                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
977            }
978            (*it)->registered = 1;
979        }
980    }
981    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
982    if (rc < 0) {
983        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
984        pthread_mutex_unlock(&mMutex);
985        return -ENODEV;
986    }
987
988    pthread_mutex_unlock(&mMutex);
989    return NO_ERROR;
990}
991
992/*===========================================================================
993 * FUNCTION   : processCaptureRequest
994 *
995 * DESCRIPTION: process a capture request from camera service
996 *
997 * PARAMETERS :
998 *   @request : request from framework to process
999 *
1000 * RETURN     :
1001 *
1002 *==========================================================================*/
1003int QCamera3HardwareInterface::processCaptureRequest(
1004                    camera3_capture_request_t *request)
1005{
1006    int rc = NO_ERROR;
1007    int32_t request_id;
1008    CameraMetadata meta;
1009    MetadataBufferInfo reproc_meta;
1010    int queueMetadata = 0;
1011
1012    pthread_mutex_lock(&mMutex);
1013
1014    rc = validateCaptureRequest(request);
1015    if (rc != NO_ERROR) {
1016        ALOGE("%s: incoming request is not valid", __func__);
1017        pthread_mutex_unlock(&mMutex);
1018        return rc;
1019    }
1020
1021    meta = request->settings;
1022
1023    // For first capture request, send capture intent, and
1024    // stream on all streams
1025    if (mFirstRequest) {
1026
1027        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1028            int32_t hal_version = CAM_HAL_V3;
1029            uint8_t captureIntent =
1030                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1031
1032            memset(mParameters, 0, sizeof(parm_buffer_t));
1033            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1034            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1035                sizeof(hal_version), &hal_version);
1036            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1037                sizeof(captureIntent), &captureIntent);
1038            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1039                mParameters);
1040        }
1041
1042        mMetadataChannel->start();
1043        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1044            it != mStreamInfo.end(); it++) {
1045            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1046            channel->start();
1047        }
1048    }
1049
1050    uint32_t frameNumber = request->frame_number;
1051    uint32_t streamTypeMask = 0;
1052
1053    if (meta.exists(ANDROID_REQUEST_ID)) {
1054        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1055        mCurrentRequestId = request_id;
1056        ALOGV("%s: Received request with id: %d",__func__, request_id);
1057    } else if (mFirstRequest || mCurrentRequestId == -1){
1058        ALOGE("%s: Unable to find request id field, \
1059                & no previous id available", __func__);
1060        return NAME_NOT_FOUND;
1061    } else {
1062        ALOGV("%s: Re-using old request id", __func__);
1063        request_id = mCurrentRequestId;
1064    }
1065
1066    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1067                                    __func__, __LINE__,
1068                                    request->num_output_buffers,
1069                                    request->input_buffer,
1070                                    frameNumber);
1071    // Acquire all request buffers first
1072    int blob_request = 0;
1073    for (size_t i = 0; i < request->num_output_buffers; i++) {
1074        const camera3_stream_buffer_t& output = request->output_buffers[i];
1075        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1076        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1077
1078        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1079        //Call function to store local copy of jpeg data for encode params.
1080            blob_request = 1;
1081            rc = getJpegSettings(request->settings);
1082            if (rc < 0) {
1083                ALOGE("%s: failed to get jpeg parameters", __func__);
1084                pthread_mutex_unlock(&mMutex);
1085                return rc;
1086            }
1087        }
1088
1089        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1090        if (rc != OK) {
1091            ALOGE("%s: fence wait failed %d", __func__, rc);
1092            pthread_mutex_unlock(&mMutex);
1093            return rc;
1094        }
1095        streamTypeMask |= channel->getStreamTypeMask();
1096    }
1097
1098    rc = setFrameParameters(request, streamTypeMask);
1099    if (rc < 0) {
1100        ALOGE("%s: fail to set frame parameters", __func__);
1101        pthread_mutex_unlock(&mMutex);
1102        return rc;
1103    }
1104
1105    /* Update pending request list and pending buffers map */
1106    PendingRequestInfo pendingRequest;
1107    pendingRequest.frame_number = frameNumber;
1108    pendingRequest.num_buffers = request->num_output_buffers;
1109    pendingRequest.request_id = request_id;
1110    pendingRequest.blob_request = blob_request;
1111    if (blob_request)
1112        pendingRequest.input_jpeg_settings = *mJpegSettings;
1113    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1114
1115    for (size_t i = 0; i < request->num_output_buffers; i++) {
1116        RequestedBufferInfo requestedBuf;
1117        requestedBuf.stream = request->output_buffers[i].stream;
1118        requestedBuf.buffer = NULL;
1119        pendingRequest.buffers.push_back(requestedBuf);
1120
1121        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1122    }
1123    mPendingRequestsList.push_back(pendingRequest);
1124
1125    // Notify metadata channel we receive a request
1126    mMetadataChannel->request(NULL, frameNumber);
1127
1128    // Call request on other streams
1129    for (size_t i = 0; i < request->num_output_buffers; i++) {
1130        const camera3_stream_buffer_t& output = request->output_buffers[i];
1131        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1132        mm_camera_buf_def_t *pInputBuffer = NULL;
1133
1134        if (channel == NULL) {
1135            ALOGE("%s: invalid channel pointer for stream", __func__);
1136            continue;
1137        }
1138
1139        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1140            QCamera3RegularChannel* inputChannel = NULL;
1141            if(request->input_buffer != NULL){
1142                //Try to get the internal format
1143                inputChannel = (QCamera3RegularChannel*)
1144                    request->input_buffer->stream->priv;
1145                if(inputChannel == NULL ){
1146                    ALOGE("%s: failed to get input channel handle", __func__);
1147                } else {
1148                    pInputBuffer =
1149                        inputChannel->getInternalFormatBuffer(
1150                                request->input_buffer->buffer);
1151                    ALOGD("%s: Input buffer dump",__func__);
1152                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1153                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1154                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1155                    ALOGD("Handle:%p", request->input_buffer->buffer);
1156                    //TODO: need to get corresponding metadata and send it to pproc
1157                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1158                         m != mStoredMetadataList.end(); m++) {
1159                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1160                            reproc_meta.meta_buf = m->meta_buf;
1161                            queueMetadata = 1;
1162                            break;
1163                        }
1164                    }
1165                }
1166            }
1167            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1168                            pInputBuffer,(QCamera3Channel*)inputChannel);
1169            if (queueMetadata) {
1170                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1171            }
1172        } else {
1173            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1174                __LINE__, output.buffer, frameNumber);
1175            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1176                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1177                     m != mStoredMetadataList.end(); m++) {
1178                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1179                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1180                            mMetadataChannel->bufDone(m->meta_buf);
1181                            free(m->meta_buf);
1182                            m = mStoredMetadataList.erase(m);
1183                            break;
1184                        }
1185                   }
1186                }
1187            }
1188            rc = channel->request(output.buffer, frameNumber);
1189        }
1190        if (rc < 0)
1191            ALOGE("%s: request failed", __func__);
1192    }
1193
1194    mFirstRequest = false;
1195    // Added a timed condition wait
1196    struct timespec ts;
1197    uint8_t isValidTimeout = 1;
1198    rc = clock_gettime(CLOCK_REALTIME, &ts);
1199    if (rc < 0) {
1200        isValidTimeout = 0;
1201        ALOGE("%s: Error reading the real time clock!!", __func__);
1202    }
1203    else {
1204        // Make timeout as 5 sec for request to be honored
1205        ts.tv_sec += 5;
1206    }
1207    //Block on conditional variable
1208    mPendingRequest = 1;
1209    while (mPendingRequest == 1) {
1210        if (!isValidTimeout) {
1211            ALOGV("%s: Blocking on conditional wait", __func__);
1212            pthread_cond_wait(&mRequestCond, &mMutex);
1213        }
1214        else {
1215            ALOGV("%s: Blocking on timed conditional wait", __func__);
1216            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1217            if (rc == ETIMEDOUT) {
1218                rc = -ENODEV;
1219                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1220                break;
1221            }
1222        }
1223        ALOGV("%s: Unblocked", __func__);
1224    }
1225
1226    pthread_mutex_unlock(&mMutex);
1227
1228    return rc;
1229}
1230
1231/*===========================================================================
1232 * FUNCTION   : getMetadataVendorTagOps
1233 *
1234 * DESCRIPTION:
1235 *
1236 * PARAMETERS :
1237 *
1238 *
1239 * RETURN     :
1240 *==========================================================================*/
1241void QCamera3HardwareInterface::getMetadataVendorTagOps(
1242                    vendor_tag_query_ops_t* /*ops*/)
1243{
1244    /* Enable locks when we eventually add Vendor Tags */
1245    /*
1246    pthread_mutex_lock(&mMutex);
1247
1248    pthread_mutex_unlock(&mMutex);
1249    */
1250    return;
1251}
1252
1253/*===========================================================================
1254 * FUNCTION   : dump
1255 *
1256 * DESCRIPTION:
1257 *
1258 * PARAMETERS :
1259 *
1260 *
1261 * RETURN     :
1262 *==========================================================================*/
1263void QCamera3HardwareInterface::dump(int /*fd*/)
1264{
1265    /*Enable lock when we implement this function*/
1266    /*
1267    pthread_mutex_lock(&mMutex);
1268
1269    pthread_mutex_unlock(&mMutex);
1270    */
1271    return;
1272}
1273
1274/*===========================================================================
1275 * FUNCTION   : flush
1276 *
1277 * DESCRIPTION:
1278 *
1279 * PARAMETERS :
1280 *
1281 *
1282 * RETURN     :
1283 *==========================================================================*/
1284int QCamera3HardwareInterface::flush()
1285{
1286    /*Enable lock when we implement this function*/
1287    /*
1288    pthread_mutex_lock(&mMutex);
1289
1290    pthread_mutex_unlock(&mMutex);
1291    */
1292    return 0;
1293}
1294
1295/*===========================================================================
1296 * FUNCTION   : captureResultCb
1297 *
1298 * DESCRIPTION: Callback handler for all capture result
1299 *              (streams, as well as metadata)
1300 *
1301 * PARAMETERS :
1302 *   @metadata : metadata information
1303 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1304 *               NULL if metadata.
1305 *
1306 * RETURN     : NONE
1307 *==========================================================================*/
1308void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1309                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1310{
1311    pthread_mutex_lock(&mMutex);
1312
1313    if (metadata_buf) {
1314        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1315        int32_t frame_number_valid = *(int32_t *)
1316            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1317        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1318            CAM_INTF_META_PENDING_REQUESTS, metadata);
1319        uint32_t frame_number = *(uint32_t *)
1320            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1321        const struct timeval *tv = (const struct timeval *)
1322            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1323        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1324            tv->tv_usec * NSEC_PER_USEC;
1325
1326        if (!frame_number_valid) {
1327            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1328            mMetadataChannel->bufDone(metadata_buf);
1329            free(metadata_buf);
1330            goto done_metadata;
1331        }
1332        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1333                frame_number, capture_time);
1334
1335        // Go through the pending requests info and send shutter/results to frameworks
1336        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1337                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1338            camera3_capture_result_t result;
1339            camera3_notify_msg_t notify_msg;
1340            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1341
1342            // Flush out all entries with less or equal frame numbers.
1343
1344            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1345            //Right now it's the same as metadata timestamp
1346
1347            //TODO: When there is metadata drop, how do we derive the timestamp of
1348            //dropped frames? For now, we fake the dropped timestamp by substracting
1349            //from the reported timestamp
1350            nsecs_t current_capture_time = capture_time -
1351                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1352
1353            // Send shutter notify to frameworks
1354            notify_msg.type = CAMERA3_MSG_SHUTTER;
1355            notify_msg.message.shutter.frame_number = i->frame_number;
1356            notify_msg.message.shutter.timestamp = current_capture_time;
1357            mCallbackOps->notify(mCallbackOps, &notify_msg);
1358            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1359                    i->frame_number, capture_time);
1360
1361            // Send empty metadata with already filled buffers for dropped metadata
1362            // and send valid metadata with already filled buffers for current metadata
1363            if (i->frame_number < frame_number) {
1364                CameraMetadata dummyMetadata;
1365                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1366                        &current_capture_time, 1);
1367                dummyMetadata.update(ANDROID_REQUEST_ID,
1368                        &(i->request_id), 1);
1369                result.result = dummyMetadata.release();
1370            } else {
1371                result.result = translateCbMetadataToResultMetadata(metadata,
1372                        current_capture_time, i->request_id, i->blob_request,
1373                        &(i->input_jpeg_settings));
1374                if (mIsZslMode) {
1375                   int found_metadata = 0;
1376                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1377                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1378                        j != i->buffers.end(); j++) {
1379                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1380                         //check if corresp. zsl already exists in the stored metadata list
1381                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1382                               m != mStoredMetadataList.begin(); m++) {
1383                            if (m->frame_number == frame_number) {
1384                               m->meta_buf = metadata_buf;
1385                               found_metadata = 1;
1386                               break;
1387                            }
1388                         }
1389                         if (!found_metadata) {
1390                            MetadataBufferInfo store_meta_info;
1391                            store_meta_info.meta_buf = metadata_buf;
1392                            store_meta_info.frame_number = frame_number;
1393                            mStoredMetadataList.push_back(store_meta_info);
1394                            found_metadata = 1;
1395                         }
1396                      }
1397                   }
1398                   if (!found_metadata) {
1399                       if (!i->input_buffer_present && i->blob_request) {
1400                          //livesnapshot or fallback non-zsl snapshot case
1401                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1402                                j != i->buffers.end(); j++){
1403                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1404                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1405                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1406                                 break;
1407                              }
1408                         }
1409                       } else {
1410                            //return the metadata immediately
1411                            mMetadataChannel->bufDone(metadata_buf);
1412                            free(metadata_buf);
1413                       }
1414                   }
1415               } else if (!mIsZslMode && i->blob_request) {
1416                   //If it is a blob request then send the metadata to the picture channel
1417                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1418               } else {
1419                   // Return metadata buffer
1420                   mMetadataChannel->bufDone(metadata_buf);
1421                   free(metadata_buf);
1422               }
1423
1424            }
1425            if (!result.result) {
1426                ALOGE("%s: metadata is NULL", __func__);
1427            }
1428            result.frame_number = i->frame_number;
1429            result.num_output_buffers = 0;
1430            result.output_buffers = NULL;
1431            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1432                    j != i->buffers.end(); j++) {
1433                if (j->buffer) {
1434                    result.num_output_buffers++;
1435                }
1436            }
1437
1438            if (result.num_output_buffers > 0) {
1439                camera3_stream_buffer_t *result_buffers =
1440                    new camera3_stream_buffer_t[result.num_output_buffers];
1441                if (!result_buffers) {
1442                    ALOGE("%s: Fatal error: out of memory", __func__);
1443                }
1444                size_t result_buffers_idx = 0;
1445                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1446                        j != i->buffers.end(); j++) {
1447                    if (j->buffer) {
1448                        result_buffers[result_buffers_idx++] = *(j->buffer);
1449                        free(j->buffer);
1450                        j->buffer = NULL;
1451                        mPendingBuffersMap.editValueFor(j->stream)--;
1452                    }
1453                }
1454                result.output_buffers = result_buffers;
1455
1456                mCallbackOps->process_capture_result(mCallbackOps, &result);
1457                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1458                        __func__, result.frame_number, current_capture_time);
1459                free_camera_metadata((camera_metadata_t *)result.result);
1460                delete[] result_buffers;
1461            } else {
1462                mCallbackOps->process_capture_result(mCallbackOps, &result);
1463                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1464                        __func__, result.frame_number, current_capture_time);
1465                free_camera_metadata((camera_metadata_t *)result.result);
1466            }
1467            // erase the element from the list
1468            i = mPendingRequestsList.erase(i);
1469        }
1470
1471
1472done_metadata:
1473        bool max_buffers_dequeued = false;
1474        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1475            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1476            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1477            if (queued_buffers == stream->max_buffers) {
1478                max_buffers_dequeued = true;
1479                break;
1480            }
1481        }
1482        if (!max_buffers_dequeued && !pending_requests) {
1483            // Unblock process_capture_request
1484            mPendingRequest = 0;
1485            pthread_cond_signal(&mRequestCond);
1486        }
1487    } else {
1488        // If the frame number doesn't exist in the pending request list,
1489        // directly send the buffer to the frameworks, and update pending buffers map
1490        // Otherwise, book-keep the buffer.
1491        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1492        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1493            i++;
1494        }
1495        if (i == mPendingRequestsList.end()) {
1496            // Verify all pending requests frame_numbers are greater
1497            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1498                    j != mPendingRequestsList.end(); j++) {
1499                if (j->frame_number < frame_number) {
1500                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1501                            __func__, j->frame_number, frame_number);
1502                }
1503            }
1504            camera3_capture_result_t result;
1505            result.result = NULL;
1506            result.frame_number = frame_number;
1507            result.num_output_buffers = 1;
1508            result.output_buffers = buffer;
1509            ALOGV("%s: result frame_number = %d, buffer = %p",
1510                    __func__, frame_number, buffer);
1511            mPendingBuffersMap.editValueFor(buffer->stream)--;
1512            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1513                int found = 0;
1514                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1515                      k != mStoredMetadataList.end(); k++) {
1516                    if (k->frame_number == frame_number) {
1517                        k->zsl_buf_hdl = buffer->buffer;
1518                        found = 1;
1519                        break;
1520                    }
1521                }
1522                if (!found) {
1523                   MetadataBufferInfo meta_info;
1524                   meta_info.frame_number = frame_number;
1525                   meta_info.zsl_buf_hdl = buffer->buffer;
1526                   mStoredMetadataList.push_back(meta_info);
1527                }
1528            }
1529            mCallbackOps->process_capture_result(mCallbackOps, &result);
1530        } else {
1531            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1532                    j != i->buffers.end(); j++) {
1533                if (j->stream == buffer->stream) {
1534                    if (j->buffer != NULL) {
1535                        ALOGE("%s: Error: buffer is already set", __func__);
1536                    } else {
1537                        j->buffer = (camera3_stream_buffer_t *)malloc(
1538                                sizeof(camera3_stream_buffer_t));
1539                        *(j->buffer) = *buffer;
1540                        ALOGV("%s: cache buffer %p at result frame_number %d",
1541                                __func__, buffer, frame_number);
1542                    }
1543                }
1544            }
1545        }
1546    }
1547    pthread_mutex_unlock(&mMutex);
1548    return;
1549}
1550
1551/*===========================================================================
1552 * FUNCTION   : translateCbMetadataToResultMetadata
1553 *
1554 * DESCRIPTION:
1555 *
1556 * PARAMETERS :
1557 *   @metadata : metadata information from callback
1558 *
1559 * RETURN     : camera_metadata_t*
1560 *              metadata in a format specified by fwk
1561 *==========================================================================*/
1562camera_metadata_t*
1563QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1564                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1565                                 int32_t request_id, int32_t BlobRequest,
1566                                 jpeg_settings_t* inputjpegsettings)
1567{
1568    CameraMetadata camMetadata;
1569    camera_metadata_t* resultMetadata;
1570
1571    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1572    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1573
1574    // Update the JPEG related info
1575    if (BlobRequest) {
1576        camMetadata.update(ANDROID_JPEG_ORIENTATION, &(inputjpegsettings->jpeg_orientation), 1);
1577        camMetadata.update(ANDROID_JPEG_QUALITY, &(inputjpegsettings->jpeg_quality), 1);
1578
1579        int32_t thumbnailSizeTable[2];
1580        thumbnailSizeTable[0] = inputjpegsettings->thumbnail_size.width;
1581        thumbnailSizeTable[1] = inputjpegsettings->thumbnail_size.height;
1582        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSizeTable, 2);
1583        ALOGV("%s: Orien=%d, quality=%d wid=%d, height=%d", __func__, inputjpegsettings->jpeg_orientation,
1584               inputjpegsettings->jpeg_quality,thumbnailSizeTable[0], thumbnailSizeTable[1]);
1585
1586        if (inputjpegsettings->gps_coordinates[0]) {
1587            double gpsCoordinates[3];
1588            gpsCoordinates[0]=*(inputjpegsettings->gps_coordinates[0]);
1589            gpsCoordinates[1]=*(inputjpegsettings->gps_coordinates[1]);
1590            gpsCoordinates[2]=*(inputjpegsettings->gps_coordinates[2]);
1591            camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
1592            ALOGV("%s: gpsCoordinates[0]=%f, 1=%f 2=%f", __func__, gpsCoordinates[0],
1593                 gpsCoordinates[1],gpsCoordinates[2]);
1594        }
1595
1596        if (inputjpegsettings->gps_timestamp) {
1597            camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, inputjpegsettings->gps_timestamp, 1);
1598            ALOGV("%s: gps_timestamp=%lld", __func__, *(inputjpegsettings->gps_timestamp));
1599        }
1600
1601        String8 str(inputjpegsettings->gps_processing_method);
1602        if (strlen(mJpegSettings->gps_processing_method) > 0) {
1603            camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
1604        }
1605    }
1606    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1607    uint8_t next_entry;
1608    while (curr_entry != CAM_INTF_PARM_MAX) {
1609       ALOGV("%s: META_DEBUG: cur_entry is %d", __func__, curr_entry);
1610       switch (curr_entry) {
1611         case CAM_INTF_META_FACE_DETECTION:{
1612             cam_face_detection_data_t *faceDetectionInfo =
1613                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1614             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1615             int32_t faceIds[numFaces];
1616             uint8_t faceScores[numFaces];
1617             int32_t faceRectangles[numFaces * 4];
1618             int32_t faceLandmarks[numFaces * 6];
1619             int j = 0, k = 0;
1620             for (int i = 0; i < numFaces; i++) {
1621                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1622                 faceScores[i] = faceDetectionInfo->faces[i].score;
1623                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1624                         faceRectangles+j, -1);
1625                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1626                 j+= 4;
1627                 k+= 6;
1628             }
1629             if (numFaces > 0) {
1630                 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1631                 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1632                 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1633                     faceRectangles, numFaces*4);
1634                 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1635                     faceLandmarks, numFaces*6);
1636             }
1637            break;
1638            }
1639         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1640             uint8_t  *color_correct_mode =
1641                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1642             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1643             break;
1644          }
1645         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1646             int32_t  *ae_precapture_id =
1647                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1648             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1649             break;
1650          }
1651         case CAM_INTF_META_AEC_ROI: {
1652            cam_area_t  *hAeRegions =
1653                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1654             int32_t aeRegions[5];
1655             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1656             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1657             break;
1658          }
1659          case CAM_INTF_META_AEC_STATE:{
1660             uint8_t *ae_state =
1661                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1662             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1663             break;
1664          }
1665          case CAM_INTF_PARM_FOCUS_MODE:{
1666             uint8_t  *focusMode =
1667                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1668             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1669                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1670             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1671             break;
1672          }
1673          case CAM_INTF_META_AF_ROI:{
1674             /*af regions*/
1675             cam_area_t  *hAfRegions =
1676                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1677             int32_t afRegions[5];
1678             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1679             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1680             break;
1681          }
1682          case CAM_INTF_META_AF_STATE: {
1683             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1684             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1685             break;
1686          }
1687          case CAM_INTF_META_AF_TRIGGER_ID: {
1688             int32_t  *afTriggerId =
1689                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1690             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1691             break;
1692          }
1693          case CAM_INTF_PARM_WHITE_BALANCE: {
1694               uint8_t  *whiteBalance =
1695                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1696               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1697                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1698                   *whiteBalance);
1699               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1700               break;
1701          }
1702          case CAM_INTF_META_AWB_REGIONS: {
1703             /*awb regions*/
1704             cam_area_t  *hAwbRegions =
1705                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1706             int32_t awbRegions[5];
1707             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1708             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1709             break;
1710          }
1711          case CAM_INTF_META_AWB_STATE: {
1712             uint8_t  *whiteBalanceState =
1713                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1714             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1715             break;
1716          }
1717          case CAM_INTF_META_MODE: {
1718             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1719             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1720             break;
1721          }
1722          case CAM_INTF_META_EDGE_MODE: {
1723             uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1724             camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1725             break;
1726          }
1727          case CAM_INTF_META_FLASH_POWER: {
1728             uint8_t  *flashPower =
1729                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1730             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1731             break;
1732          }
1733          case CAM_INTF_META_FLASH_FIRING_TIME: {
1734             int64_t  *flashFiringTime =
1735                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1736             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1737             break;
1738          }
1739          case CAM_INTF_META_FLASH_STATE: {
1740             uint8_t  *flashState =
1741                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1742             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1743             break;
1744          }
1745          case CAM_INTF_META_FLASH_MODE:{
1746             uint8_t *flashMode = (uint8_t*)
1747                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
1748             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
1749             break;
1750          }
1751          case CAM_INTF_META_HOTPIXEL_MODE: {
1752              uint8_t  *hotPixelMode =
1753                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1754              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1755              break;
1756          }
1757          case CAM_INTF_META_LENS_APERTURE:{
1758             float  *lensAperture =
1759                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1760             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1761             break;
1762          }
1763          case CAM_INTF_META_LENS_FILTERDENSITY: {
1764             float  *filterDensity =
1765                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1766             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1767             break;
1768          }
1769          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1770             float  *focalLength =
1771                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1772             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1773             break;
1774          }
1775          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1776             float  *focusDistance =
1777                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1778             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1779             break;
1780          }
1781          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1782             float  *focusRange =
1783                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1784             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
1785          }
1786          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1787             uint8_t  *opticalStab =
1788                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1789             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1790          }
1791          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1792             uint8_t  *noiseRedMode =
1793                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1794             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1795             break;
1796          }
1797          case CAM_INTF_META_SCALER_CROP_REGION: {
1798             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1799             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1800             int32_t scalerCropRegion[4];
1801             scalerCropRegion[0] = hScalerCropRegion->left;
1802             scalerCropRegion[1] = hScalerCropRegion->top;
1803             scalerCropRegion[2] = hScalerCropRegion->width;
1804             scalerCropRegion[3] = hScalerCropRegion->height;
1805             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1806             break;
1807          }
1808          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1809             int64_t  *sensorExpTime =
1810                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1811             mMetadataResponse.exposure_time = *sensorExpTime;
1812             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1813             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1814             break;
1815          }
1816          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1817             int64_t  *sensorFameDuration =
1818                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1819             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1820             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1821             break;
1822          }
1823          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1824             int32_t  *sensorSensitivity =
1825                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1826             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1827             mMetadataResponse.iso_speed = *sensorSensitivity;
1828             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1829             break;
1830          }
1831          case CAM_INTF_META_SHADING_MODE: {
1832             uint8_t  *shadingMode =
1833                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1834             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1835             break;
1836          }
1837          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1838             uint8_t  *faceDetectMode =
1839                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1840             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1841                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1842                                                        *faceDetectMode);
1843             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1844             break;
1845          }
1846          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1847             uint8_t  *histogramMode =
1848                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1849             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1850             break;
1851          }
1852          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1853               uint8_t  *sharpnessMapMode =
1854                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1855               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1856                                  sharpnessMapMode, 1);
1857               break;
1858           }
1859          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1860               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1861               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1862               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1863                                  (int32_t*)sharpnessMap->sharpness,
1864                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1865               break;
1866          }
1867          case CAM_INTF_META_LENS_SHADING_MAP: {
1868               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1869               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1870               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1871               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1872               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1873                                  (float*)lensShadingMap->lens_shading,
1874                                  4*map_width*map_height);
1875               break;
1876          }
1877          case CAM_INTF_META_TONEMAP_CURVES:{
1878             //Populate CAM_INTF_META_TONEMAP_CURVES
1879             /* ch0 = G, ch 1 = B, ch 2 = R*/
1880             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1881             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1882             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1883                                (float*)tonemap->curves[0].tonemap_points,
1884                                tonemap->tonemap_points_cnt * 2);
1885
1886             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1887                                (float*)tonemap->curves[1].tonemap_points,
1888                                tonemap->tonemap_points_cnt * 2);
1889
1890             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1891                                (float*)tonemap->curves[2].tonemap_points,
1892                                tonemap->tonemap_points_cnt * 2);
1893             break;
1894          }
1895          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1896             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1897             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1898             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1899             break;
1900          }
1901          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1902              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1903              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1904              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1905                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1906              break;
1907          }
1908          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1909             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1910             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1911             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1912                       predColorCorrectionGains->gains, 4);
1913             break;
1914          }
1915          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1916             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1917                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1918             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1919                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1920             break;
1921
1922          }
1923          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
1924             uint8_t *blackLevelLock = (uint8_t*)
1925               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1926             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1927             break;
1928          }
1929          case CAM_INTF_META_SCENE_FLICKER:{
1930             uint8_t *sceneFlicker = (uint8_t*)
1931             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1932             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1933             break;
1934          }
1935          case CAM_INTF_PARM_LED_MODE:
1936             break;
1937          default:
1938             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
1939                   __func__, curr_entry);
1940             break;
1941       }
1942       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
1943       curr_entry = next_entry;
1944    }
1945    resultMetadata = camMetadata.release();
1946    return resultMetadata;
1947}
1948
1949/*===========================================================================
1950 * FUNCTION   : convertToRegions
1951 *
1952 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1953 *
1954 * PARAMETERS :
1955 *   @rect   : cam_rect_t struct to convert
1956 *   @region : int32_t destination array
1957 *   @weight : if we are converting from cam_area_t, weight is valid
1958 *             else weight = -1
1959 *
1960 *==========================================================================*/
1961void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1962    region[0] = rect.left;
1963    region[1] = rect.top;
1964    region[2] = rect.left + rect.width;
1965    region[3] = rect.top + rect.height;
1966    if (weight > -1) {
1967        region[4] = weight;
1968    }
1969}
1970
1971/*===========================================================================
1972 * FUNCTION   : convertFromRegions
1973 *
1974 * DESCRIPTION: helper method to convert from array to cam_rect_t
1975 *
1976 * PARAMETERS :
1977 *   @rect   : cam_rect_t struct to convert
1978 *   @region : int32_t destination array
1979 *   @weight : if we are converting from cam_area_t, weight is valid
1980 *             else weight = -1
1981 *
1982 *==========================================================================*/
1983void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1984                                                   const camera_metadata_t *settings,
1985                                                   uint32_t tag){
1986    CameraMetadata frame_settings;
1987    frame_settings = settings;
1988    int32_t x_min = frame_settings.find(tag).data.i32[0];
1989    int32_t y_min = frame_settings.find(tag).data.i32[1];
1990    int32_t x_max = frame_settings.find(tag).data.i32[2];
1991    int32_t y_max = frame_settings.find(tag).data.i32[3];
1992    roi->weight = frame_settings.find(tag).data.i32[4];
1993    roi->rect.left = x_min;
1994    roi->rect.top = y_min;
1995    roi->rect.width = x_max - x_min;
1996    roi->rect.height = y_max - y_min;
1997}
1998
1999/*===========================================================================
2000 * FUNCTION   : resetIfNeededROI
2001 *
2002 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2003 *              crop region
2004 *
2005 * PARAMETERS :
2006 *   @roi       : cam_area_t struct to resize
2007 *   @scalerCropRegion : cam_crop_region_t region to compare against
2008 *
2009 *
2010 *==========================================================================*/
2011bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2012                                                 const cam_crop_region_t* scalerCropRegion)
2013{
2014    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2015    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2016    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2017    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2018    if ((roi_x_max < scalerCropRegion->left) ||
2019        (roi_y_max < scalerCropRegion->top)  ||
2020        (roi->rect.left > crop_x_max) ||
2021        (roi->rect.top > crop_y_max)){
2022        return false;
2023    }
2024    if (roi->rect.left < scalerCropRegion->left) {
2025        roi->rect.left = scalerCropRegion->left;
2026    }
2027    if (roi->rect.top < scalerCropRegion->top) {
2028        roi->rect.top = scalerCropRegion->top;
2029    }
2030    if (roi_x_max > crop_x_max) {
2031        roi_x_max = crop_x_max;
2032    }
2033    if (roi_y_max > crop_y_max) {
2034        roi_y_max = crop_y_max;
2035    }
2036    roi->rect.width = roi_x_max - roi->rect.left;
2037    roi->rect.height = roi_y_max - roi->rect.top;
2038    return true;
2039}
2040
2041/*===========================================================================
2042 * FUNCTION   : convertLandmarks
2043 *
2044 * DESCRIPTION: helper method to extract the landmarks from face detection info
2045 *
2046 * PARAMETERS :
 *   @face   : cam_face_detection_info_t carrying the eye/mouth positions
2048 *   @landmarks : int32_t destination array
2049 *
2050 *
2051 *==========================================================================*/
2052void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2053{
2054    landmarks[0] = face.left_eye_center.x;
2055    landmarks[1] = face.left_eye_center.y;
2056    landmarks[2] = face.right_eye_center.y;
2057    landmarks[3] = face.right_eye_center.y;
2058    landmarks[4] = face.mouth_center.x;
2059    landmarks[5] = face.mouth_center.y;
2060}
2061
2062#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2063/*===========================================================================
2064 * FUNCTION   : initCapabilities
2065 *
2066 * DESCRIPTION: initialize camera capabilities in static data struct
2067 *
2068 * PARAMETERS :
2069 *   @cameraId  : camera Id
2070 *
2071 * RETURN     : int32_t type of status
2072 *              NO_ERROR  -- success
2073 *              none-zero failure code
2074 *==========================================================================*/
2075int QCamera3HardwareInterface::initCapabilities(int cameraId)
2076{
2077    int rc = 0;
2078    mm_camera_vtbl_t *cameraHandle = NULL;
2079    QCamera3HeapMemory *capabilityHeap = NULL;
2080
2081    cameraHandle = camera_open(cameraId);
2082    if (!cameraHandle) {
2083        ALOGE("%s: camera_open failed", __func__);
2084        rc = -1;
2085        goto open_failed;
2086    }
2087
2088    capabilityHeap = new QCamera3HeapMemory();
2089    if (capabilityHeap == NULL) {
2090        ALOGE("%s: creation of capabilityHeap failed", __func__);
2091        goto heap_creation_failed;
2092    }
2093    /* Allocate memory for capability buffer */
2094    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2095    if(rc != OK) {
2096        ALOGE("%s: No memory for cappability", __func__);
2097        goto allocate_failed;
2098    }
2099
2100    /* Map memory for capability buffer */
2101    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2102    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2103                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2104                                capabilityHeap->getFd(0),
2105                                sizeof(cam_capability_t));
2106    if(rc < 0) {
2107        ALOGE("%s: failed to map capability buffer", __func__);
2108        goto map_failed;
2109    }
2110
2111    /* Query Capability */
2112    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2113    if(rc < 0) {
2114        ALOGE("%s: failed to query capability",__func__);
2115        goto query_failed;
2116    }
2117    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2118    if (!gCamCapability[cameraId]) {
2119        ALOGE("%s: out of memory", __func__);
2120        goto query_failed;
2121    }
2122    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2123                                        sizeof(cam_capability_t));
2124    rc = 0;
2125
2126query_failed:
2127    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2128                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2129map_failed:
2130    capabilityHeap->deallocate();
2131allocate_failed:
2132    delete capabilityHeap;
2133heap_creation_failed:
2134    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2135    cameraHandle = NULL;
2136open_failed:
2137    return rc;
2138}
2139
2140/*===========================================================================
2141 * FUNCTION   : initParameters
2142 *
2143 * DESCRIPTION: initialize camera parameters
2144 *
2145 * PARAMETERS :
2146 *
2147 * RETURN     : int32_t type of status
2148 *              NO_ERROR  -- success
2149 *              none-zero failure code
2150 *==========================================================================*/
2151int QCamera3HardwareInterface::initParameters()
2152{
2153    int rc = 0;
2154
2155    //Allocate Set Param Buffer
2156    mParamHeap = new QCamera3HeapMemory();
2157    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2158    if(rc != OK) {
2159        rc = NO_MEMORY;
2160        ALOGE("Failed to allocate SETPARM Heap memory");
2161        delete mParamHeap;
2162        mParamHeap = NULL;
2163        return rc;
2164    }
2165
2166    //Map memory for parameters buffer
2167    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2168            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2169            mParamHeap->getFd(0),
2170            sizeof(parm_buffer_t));
2171    if(rc < 0) {
2172        ALOGE("%s:failed to map SETPARM buffer",__func__);
2173        rc = FAILED_TRANSACTION;
2174        mParamHeap->deallocate();
2175        delete mParamHeap;
2176        mParamHeap = NULL;
2177        return rc;
2178    }
2179
2180    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2181    return rc;
2182}
2183
2184/*===========================================================================
2185 * FUNCTION   : deinitParameters
2186 *
2187 * DESCRIPTION: de-initialize camera parameters
2188 *
2189 * PARAMETERS :
2190 *
2191 * RETURN     : NONE
2192 *==========================================================================*/
2193void QCamera3HardwareInterface::deinitParameters()
2194{
2195    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2196            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2197
2198    mParamHeap->deallocate();
2199    delete mParamHeap;
2200    mParamHeap = NULL;
2201
2202    mParameters = NULL;
2203}
2204
2205/*===========================================================================
2206 * FUNCTION   : calcMaxJpegSize
2207 *
2208 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2209 *
2210 * PARAMETERS :
2211 *
2212 * RETURN     : max_jpeg_size
2213 *==========================================================================*/
2214int QCamera3HardwareInterface::calcMaxJpegSize()
2215{
2216    int32_t max_jpeg_size = 0;
2217    int temp_width, temp_height;
2218    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2219        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2220        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2221        if (temp_width * temp_height > max_jpeg_size ) {
2222            max_jpeg_size = temp_width * temp_height;
2223        }
2224    }
2225    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2226    return max_jpeg_size;
2227}
2228
2229/*===========================================================================
2230 * FUNCTION   : initStaticMetadata
2231 *
2232 * DESCRIPTION: initialize the static metadata
2233 *
2234 * PARAMETERS :
2235 *   @cameraId  : camera Id
2236 *
2237 * RETURN     : int32_t type of status
2238 *              0  -- success
2239 *              non-zero failure code
2240 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    int rc = 0;
    // Wrapper class that packs tag/value pairs into a camera_metadata_t.
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    // Facing also drives the hard-coded optics defaults just below.
    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1); */

    /*hard coded for now but this should come from sensor*/
    float min_focus_distance;
    if(facingBack){
        min_focus_distance = 10;
    } else {
        min_focus_distance = 0;
    }
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &min_focus_distance, 1);

    /* Lens static characteristics, straight from the capability query. */
    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      &gCamCapability[cameraId]->focal_length,
                      1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                      gCamCapability[cameraId]->optical_stab_modes_count);

    staticInfo.update(ANDROID_LENS_POSITION,
                      gCamCapability[cameraId]->lens_position,
                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));

    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
            geo_correction_map_size,
            sizeof(geo_correction_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
                       gCamCapability[cameraId]->geo_correction_map,
                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));

    /* Sensor static characteristics. */
    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);

    camera_metadata_rational baseGainFactor = {
            gCamCapability[cameraId]->base_gain_factor.numerator,
            gCamCapability[cameraId]->base_gain_factor.denominator};
    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
                      &baseGainFactor, 1);

    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                               gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, 2);

    // Active array is reported as (xmin, ymin, width, height) with a (0,0)
    // origin.
    int32_t active_array_size[] = {0, 0,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    /* Statistics capabilities. */
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                                gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);


    /* Scaler / stream configuration capabilities. */
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
                      &gCamCapability[cameraId]->raw_min_duration,
                       1);

    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
                                                HAL_PIXEL_FORMAT_BLOB};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    // NOTE(review): buffer is sized by CAM_FORMAT_MAX but filled with
    // picture_sizes_tbl_cnt entries — assumes cnt <= CAM_FORMAT_MAX;
    // confirm against the backend capability limits.
    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /*HAL 1 and HAL 3 common*/
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    int32_t max3aRegions = 1;
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            &max3aRegions, 1);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                                        gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    // JPEG sizes intentionally reuse the processed-size table built above.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    // Worst-case JPEG buffer: largest picture area at YUV420 density (3/2
    // bytes per pixel) plus the trailing jpeg blob header.
    // NOTE(review): duplicates calcMaxJpegSize(), which is tied to mCameraId;
    // this copy works for the explicit cameraId parameter.
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // Translate backend effect enums to framework values, silently dropping
    // any the framework does not know about.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    int32_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP,
                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // supported_indexes records which backend table slot each advertised
    // scene mode came from, so makeOverridesList() can find its overrides.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // One (ae, awb, af) override triplet per advertised scene mode.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                 gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                    gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);


    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // NOTE(review): avail_ae_modes holds at most 5 entries; this assumes
    // supported_ae_modes_cnt <= 2 when flash is available (3 flash modes
    // are appended below) — confirm against the backend limits.
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // Per-class output stream limits; presumably {raw, processed, jpeg}
    // per the ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS tag — verify against
    // the camera_metadata tag documentation.
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    // Hand the packed metadata to the global cache consumed by getCamInfo().
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}
2600
2601/*===========================================================================
2602 * FUNCTION   : makeTable
2603 *
2604 * DESCRIPTION: make a table of sizes
2605 *
2606 * PARAMETERS :
2607 *
2608 *
2609 *==========================================================================*/
2610void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2611                                          int32_t* sizeTable)
2612{
2613    int j = 0;
2614    for (int i = 0; i < size; i++) {
2615        sizeTable[j] = dimTable[i].width;
2616        sizeTable[j+1] = dimTable[i].height;
2617        j+=2;
2618    }
2619}
2620
2621/*===========================================================================
2622 * FUNCTION   : makeFPSTable
2623 *
2624 * DESCRIPTION: make a table of fps ranges
2625 *
2626 * PARAMETERS :
2627 *
2628 *==========================================================================*/
2629void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2630                                          int32_t* fpsRangesTable)
2631{
2632    int j = 0;
2633    for (int i = 0; i < size; i++) {
2634        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2635        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2636        j+=2;
2637    }
2638}
2639
2640/*===========================================================================
2641 * FUNCTION   : makeOverridesList
2642 *
2643 * DESCRIPTION: make a list of scene mode overrides
2644 *
2645 * PARAMETERS :
2646 *
2647 *
2648 *==========================================================================*/
2649void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2650                                                  uint8_t size, uint8_t* overridesList,
2651                                                  uint8_t* supported_indexes,
2652                                                  int camera_id)
2653{
2654    /*daemon will give a list of overrides for all scene modes.
2655      However we should send the fwk only the overrides for the scene modes
2656      supported by the framework*/
2657    int j = 0, index = 0, supt = 0;
2658    uint8_t focus_override;
2659    for (int i = 0; i < size; i++) {
2660        supt = 0;
2661        index = supported_indexes[i];
2662        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2663        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2664                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2665                                                    overridesTable[index].awb_mode);
2666        focus_override = (uint8_t)overridesTable[index].af_mode;
2667        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2668           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2669              supt = 1;
2670              break;
2671           }
2672        }
2673        if (supt) {
2674           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2675                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2676                                              focus_override);
2677        } else {
2678           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2679        }
2680        j+=3;
2681    }
2682}
2683
2684/*===========================================================================
 * FUNCTION   : getScalarFormat
2686 *
2687 * DESCRIPTION: convert the format to type recognized by framework
2688 *
2689 * PARAMETERS : format : the format from backend
2690 *
2691 ** RETURN    : format recognized by framework
2692 *
2693 *==========================================================================*/
2694int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2695{
2696    int32_t halPixelFormat;
2697
2698    switch (format) {
2699    case CAM_FORMAT_YUV_420_NV12:
2700        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2701        break;
2702    case CAM_FORMAT_YUV_420_NV21:
2703        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2704        break;
2705    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2706        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2707        break;
2708    case CAM_FORMAT_YUV_420_YV12:
2709        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2710        break;
2711    case CAM_FORMAT_YUV_422_NV16:
2712    case CAM_FORMAT_YUV_422_NV61:
2713    default:
2714        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2715        break;
2716    }
2717    return halPixelFormat;
2718}
2719
2720/*===========================================================================
2721 * FUNCTION   : getSensorSensitivity
2722 *
2723 * DESCRIPTION: convert iso_mode to an integer value
2724 *
2725 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2726 *
2727 ** RETURN    : sensitivity supported by sensor
2728 *
2729 *==========================================================================*/
2730int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2731{
2732    int32_t sensitivity;
2733
2734    switch (iso_mode) {
2735    case CAM_ISO_MODE_100:
2736        sensitivity = 100;
2737        break;
2738    case CAM_ISO_MODE_200:
2739        sensitivity = 200;
2740        break;
2741    case CAM_ISO_MODE_400:
2742        sensitivity = 400;
2743        break;
2744    case CAM_ISO_MODE_800:
2745        sensitivity = 800;
2746        break;
2747    case CAM_ISO_MODE_1600:
2748        sensitivity = 1600;
2749        break;
2750    default:
2751        sensitivity = -1;
2752        break;
2753    }
2754    return sensitivity;
2755}
2756
2757
2758/*===========================================================================
2759 * FUNCTION   : AddSetParmEntryToBatch
2760 *
2761 * DESCRIPTION: add set parameter entry into batch
2762 *
2763 * PARAMETERS :
2764 *   @p_table     : ptr to parameter buffer
2765 *   @paramType   : parameter type
2766 *   @paramLength : length of parameter value
2767 *   @paramValue  : ptr to parameter value
2768 *
2769 * RETURN     : int32_t type of status
2770 *              NO_ERROR  -- success
2771 *              none-zero failure code
2772 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    // The batch is a sorted singly-linked list threaded through the fixed
    // parameter table: a param's id doubles as its slot position, and each
    // valid slot stores the id of the next valid slot.
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
        // Entry is already the list head; just overwrite its value below.
    } else if (position < current){
        // New smallest id: insert in front and make it the new head.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Reject payloads larger than the fixed-size slot; the list links set
    // above are harmless for an entry whose value is never written.
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2814
2815/*===========================================================================
2816 * FUNCTION   : lookupFwkName
2817 *
2818 * DESCRIPTION: In case the enum is not same in fwk and backend
2819 *              make sure the parameter is correctly propogated
2820 *
2821 * PARAMETERS  :
2822 *   @arr      : map between the two enums
2823 *   @len      : len of the map
2824 *   @hal_name : name of the hal_parm to map
2825 *
2826 * RETURN     : int type of status
2827 *              fwk_name  -- success
2828 *              none-zero failure code
2829 *==========================================================================*/
2830int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2831                                             int len, int hal_name)
2832{
2833
2834    for (int i = 0; i < len; i++) {
2835        if (arr[i].hal_name == hal_name)
2836            return arr[i].fwk_name;
2837    }
2838
2839    /* Not able to find matching framework type is not necessarily
2840     * an error case. This happens when mm-camera supports more attributes
2841     * than the frameworks do */
2842    ALOGD("%s: Cannot find matching framework type", __func__);
2843    return NAME_NOT_FOUND;
2844}
2845
2846/*===========================================================================
2847 * FUNCTION   : lookupHalName
2848 *
2849 * DESCRIPTION: In case the enum is not same in fwk and backend
2850 *              make sure the parameter is correctly propogated
2851 *
2852 * PARAMETERS  :
2853 *   @arr      : map between the two enums
2854 *   @len      : len of the map
2855 *   @fwk_name : name of the hal_parm to map
2856 *
2857 * RETURN     : int32_t type of status
2858 *              hal_name  -- success
2859 *              none-zero failure code
2860 *==========================================================================*/
2861int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2862                                             int len, int fwk_name)
2863{
2864    for (int i = 0; i < len; i++) {
2865       if (arr[i].fwk_name == fwk_name)
2866           return arr[i].hal_name;
2867    }
2868    ALOGE("%s: Cannot find matching hal type", __func__);
2869    return NAME_NOT_FOUND;
2870}
2871
2872/*===========================================================================
 * FUNCTION   : getCamInfo
2874 *
2875 * DESCRIPTION: query camera capabilities
2876 *
2877 * PARAMETERS :
2878 *   @cameraId  : camera Id
2879 *   @info      : camera info struct to be filled in with camera capabilities
2880 *
2881 * RETURN     : int32_t type of status
2882 *              NO_ERROR  -- success
2883 *              none-zero failure code
2884 *==========================================================================*/
2885int QCamera3HardwareInterface::getCamInfo(int cameraId,
2886                                    struct camera_info *info)
2887{
2888    int rc = 0;
2889
2890    if (NULL == gCamCapability[cameraId]) {
2891        rc = initCapabilities(cameraId);
2892        if (rc < 0) {
2893            //pthread_mutex_unlock(&g_camlock);
2894            return rc;
2895        }
2896    }
2897
2898    if (NULL == gStaticMetadata[cameraId]) {
2899        rc = initStaticMetadata(cameraId);
2900        if (rc < 0) {
2901            return rc;
2902        }
2903    }
2904
2905    switch(gCamCapability[cameraId]->position) {
2906    case CAM_POSITION_BACK:
2907        info->facing = CAMERA_FACING_BACK;
2908        break;
2909
2910    case CAM_POSITION_FRONT:
2911        info->facing = CAMERA_FACING_FRONT;
2912        break;
2913
2914    default:
2915        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2916        rc = -1;
2917        break;
2918    }
2919
2920
2921    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2922    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2923    info->static_camera_characteristics = gStaticMetadata[cameraId];
2924
2925    return rc;
2926}
2927
2928/*===========================================================================
2929 * FUNCTION   : translateCapabilityToMetadata
2930 *
2931 * DESCRIPTION: translate camera capabilities into default camera_metadata_t settings
2932 *
2933 * PARAMETERS : type of the request
2934 *
2935 *
2936 * RETURN     : success: camera_metadata_t*
2937 *              failure: NULL
2938 *
2939 *==========================================================================*/
2940camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2941{
2942    pthread_mutex_lock(&mMutex);
2943
2944    if (mDefaultMetadata[type] != NULL) {
2945        pthread_mutex_unlock(&mMutex);
2946        return mDefaultMetadata[type];
2947    }
2948    //first time we are handling this request
2949    //fill up the metadata structure using the wrapper class
2950    CameraMetadata settings;
2951    //translate from cam_capability_t to camera_metadata_tag_t
2952    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2953    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2954    int32_t defaultRequestID = 0;
2955    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
2956
2957    /*control*/
2958
2959    uint8_t controlIntent = 0;
2960    switch (type) {
2961      case CAMERA3_TEMPLATE_PREVIEW:
2962        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2963        break;
2964      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2965        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2966        break;
2967      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2968        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2969        break;
2970      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2971        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2972        break;
2973      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2974        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2975        break;
2976      default:
2977        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2978        break;
2979    }
2980    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2981
2982    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2983            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2984
2985    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2986    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2987
2988    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2989    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2990
2991    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2992    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2993
2994    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2995    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2996
2997    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2998    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2999
3000    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
3001    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
3002
3003    static uint8_t focusMode;
3004    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
3005        ALOGE("%s: Setting focus mode to auto", __func__);
3006        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
3007    } else {
3008        ALOGE("%s: Setting focus mode to off", __func__);
3009        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3010    }
3011    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
3012
3013    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
3014    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
3015
3016    /*flash*/
3017    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3018    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3019
3020    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3021    settings.update(ANDROID_FLASH_FIRING_POWER,
3022            &flashFiringLevel, 1);
3023
3024    /* lens */
3025    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3026    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3027
3028    if (gCamCapability[mCameraId]->filter_densities_count) {
3029        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3030        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3031                        gCamCapability[mCameraId]->filter_densities_count);
3032    }
3033
3034    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3035    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3036
3037    /* Exposure time(Update the Min Exposure Time)*/
3038    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
3039    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
3040
3041    /* frame duration */
3042    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
3043    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
3044
3045    /* sensitivity */
3046    static const int32_t default_sensitivity = 100;
3047    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
3048
3049    /*edge mode*/
3050    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
3051    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
3052
3053    /*noise reduction mode*/
3054    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3055    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3056
3057    /*color correction mode*/
3058    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3059    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3060
3061    /*transform matrix mode*/
3062    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3063    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3064
3065    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3066    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3067
3068    mDefaultMetadata[type] = settings.release();
3069
3070    pthread_mutex_unlock(&mMutex);
3071    return mDefaultMetadata[type];
3072}
3073
3074/*===========================================================================
3075 * FUNCTION   : setFrameParameters
3076 *
3077 * DESCRIPTION: set parameters per frame as requested in the metadata from
3078 *              framework
3079 *
3080 * PARAMETERS :
3081 *   @request   : request that needs to be serviced
3082 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3083 *
3084 * RETURN     : success: NO_ERROR
3085 *              failure:
3086 *==========================================================================*/
3087int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3088                    uint32_t streamTypeMask)
3089{
3090    /*translate from camera_metadata_t type to parm_type_t*/
3091    int rc = 0;
3092    if (request->settings == NULL && mFirstRequest) {
3093        /*settings cannot be null for the first request*/
3094        return BAD_VALUE;
3095    }
3096
3097    int32_t hal_version = CAM_HAL_V3;
3098
3099    memset(mParameters, 0, sizeof(parm_buffer_t));
3100    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3101    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3102                sizeof(hal_version), &hal_version);
3103    if (rc < 0) {
3104        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3105        return BAD_VALUE;
3106    }
3107
3108    /*we need to update the frame number in the parameters*/
3109    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3110                                sizeof(request->frame_number), &(request->frame_number));
3111    if (rc < 0) {
3112        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3113        return BAD_VALUE;
3114    }
3115
3116    /* Update stream id mask where buffers are requested */
3117    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3118                                sizeof(streamTypeMask), &streamTypeMask);
3119    if (rc < 0) {
3120        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3121        return BAD_VALUE;
3122    }
3123
3124    if(request->settings != NULL){
3125        rc = translateMetadataToParameters(request);
3126    }
3127    /*set the parameters to backend*/
3128    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3129    return rc;
3130}
3131
3132/*===========================================================================
3133 * FUNCTION   : translateMetadataToParameters
3134 *
3135 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3136 *
3137 *
3138 * PARAMETERS :
3139 *   @request  : request sent from framework
3140 *
3141 *
3142 * RETURN     : success: NO_ERROR
3143 *              failure:
3144 *==========================================================================*/
3145int QCamera3HardwareInterface::translateMetadataToParameters
3146                                  (const camera3_capture_request_t *request)
3147{
3148    int rc = 0;
3149    CameraMetadata frame_settings;
3150    frame_settings = request->settings;
3151
3152    /* Do not change the order of the following list unless you know what you are
3153     * doing.
3154     * The order is laid out in such a way that parameters in the front of the table
3155     * may be used to override the parameters later in the table. Examples are:
3156     * 1. META_MODE should precede AEC/AWB/AF MODE
3157     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3158     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3159     * 4. Any mode should precede it's corresponding settings
3160     */
3161    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3162        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3163        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3164                sizeof(metaMode), &metaMode);
3165        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3166           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3167           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3168                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3169                                             fwk_sceneMode);
3170           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3171                sizeof(sceneMode), &sceneMode);
3172        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3173           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3174           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3175                sizeof(sceneMode), &sceneMode);
3176        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3177           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3178           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3179                sizeof(sceneMode), &sceneMode);
3180        }
3181    }
3182
3183    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3184        uint8_t fwk_aeMode =
3185            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3186        uint8_t aeMode;
3187        int32_t redeye;
3188
3189        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3190            aeMode = CAM_AE_MODE_OFF;
3191        } else {
3192            aeMode = CAM_AE_MODE_ON;
3193        }
3194        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3195            redeye = 1;
3196        } else {
3197            redeye = 0;
3198        }
3199
3200        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3201                                          sizeof(AE_FLASH_MODE_MAP),
3202                                          fwk_aeMode);
3203        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3204                sizeof(aeMode), &aeMode);
3205        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3206                sizeof(flashMode), &flashMode);
3207        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3208                sizeof(redeye), &redeye);
3209    }
3210
3211    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3212        uint8_t fwk_whiteLevel =
3213            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3214        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3215                sizeof(WHITE_BALANCE_MODES_MAP),
3216                fwk_whiteLevel);
3217        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3218                sizeof(whiteLevel), &whiteLevel);
3219    }
3220
3221    float focalDistance = -1.0;
3222    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3223        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3224        rc = AddSetParmEntryToBatch(mParameters,
3225                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3226                sizeof(focalDistance), &focalDistance);
3227    }
3228
3229    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3230        uint8_t fwk_focusMode =
3231            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3232        uint8_t focusMode;
3233        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3234            focusMode = CAM_FOCUS_MODE_INFINITY;
3235        } else{
3236         focusMode = lookupHalName(FOCUS_MODES_MAP,
3237                                   sizeof(FOCUS_MODES_MAP),
3238                                   fwk_focusMode);
3239        }
3240        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3241                sizeof(focusMode), &focusMode);
3242    }
3243
3244    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3245        int32_t antibandingMode =
3246            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3247        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3248                sizeof(antibandingMode), &antibandingMode);
3249    }
3250
3251    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3252        int32_t expCompensation = frame_settings.find(
3253            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3254        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3255            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3256        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3257            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3258        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3259          sizeof(expCompensation), &expCompensation);
3260    }
3261
3262    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3263        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3264        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3265                sizeof(aeLock), &aeLock);
3266    }
3267    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3268        cam_fps_range_t fps_range;
3269        fps_range.min_fps =
3270            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3271        fps_range.max_fps =
3272            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3273        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3274                sizeof(fps_range), &fps_range);
3275    }
3276
3277    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3278        uint8_t awbLock =
3279            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3280        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3281                sizeof(awbLock), &awbLock);
3282    }
3283
3284    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3285        uint8_t fwk_effectMode =
3286            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3287        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3288                sizeof(EFFECT_MODES_MAP),
3289                fwk_effectMode);
3290        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3291                sizeof(effectMode), &effectMode);
3292    }
3293
3294    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3295        uint8_t colorCorrectMode =
3296            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3297        rc =
3298            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3299                    sizeof(colorCorrectMode), &colorCorrectMode);
3300    }
3301
3302    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3303        cam_color_correct_gains_t colorCorrectGains;
3304        for (int i = 0; i < 4; i++) {
3305            colorCorrectGains.gains[i] =
3306                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3307        }
3308        rc =
3309            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3310                    sizeof(colorCorrectGains), &colorCorrectGains);
3311    }
3312
3313    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3314        cam_color_correct_matrix_t colorCorrectTransform;
3315        cam_rational_type_t transform_elem;
3316        int num = 0;
3317        for (int i = 0; i < 3; i++) {
3318           for (int j = 0; j < 3; j++) {
3319              transform_elem.numerator =
3320                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3321              transform_elem.denominator =
3322                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3323              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3324              num++;
3325           }
3326        }
3327        rc =
3328            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3329                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3330    }
3331
3332    cam_trigger_t aecTrigger;
3333    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3334    aecTrigger.trigger_id = -1;
3335    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3336        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3337        aecTrigger.trigger =
3338            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3339        aecTrigger.trigger_id =
3340            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3341    }
3342    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3343                                sizeof(aecTrigger), &aecTrigger);
3344
3345    /*af_trigger must come with a trigger id*/
3346    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3347        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3348        cam_trigger_t af_trigger;
3349        af_trigger.trigger =
3350            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3351        af_trigger.trigger_id =
3352            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3353        rc = AddSetParmEntryToBatch(mParameters,
3354                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3355    }
3356
3357    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3358        int32_t demosaic =
3359            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3360        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3361                sizeof(demosaic), &demosaic);
3362    }
3363
3364    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3365        cam_edge_application_t edge_application;
3366        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3367        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3368            edge_application.sharpness = 0;
3369        } else {
3370            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3371                int32_t edgeStrength =
3372                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3373                edge_application.sharpness = edgeStrength;
3374            } else {
3375                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3376            }
3377        }
3378        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3379                sizeof(edge_application), &edge_application);
3380    }
3381
3382    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3383        int32_t respectFlashMode = 1;
3384        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3385            uint8_t fwk_aeMode =
3386                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3387            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3388                respectFlashMode = 0;
3389                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3390                    __func__);
3391            }
3392        }
3393        if (respectFlashMode) {
3394            uint8_t flashMode =
3395                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3396            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3397                                          sizeof(FLASH_MODES_MAP),
3398                                          flashMode);
3399            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3400            // To check: CAM_INTF_META_FLASH_MODE usage
3401            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3402                          sizeof(flashMode), &flashMode);
3403        }
3404    }
3405
3406    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3407        uint8_t flashPower =
3408            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3409        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3410                sizeof(flashPower), &flashPower);
3411    }
3412
3413    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3414        int64_t flashFiringTime =
3415            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3416        rc = AddSetParmEntryToBatch(mParameters,
3417                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3418    }
3419
3420    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3421        uint8_t geometricMode =
3422            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3423        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3424                sizeof(geometricMode), &geometricMode);
3425    }
3426
3427    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3428        uint8_t geometricStrength =
3429            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3430        rc = AddSetParmEntryToBatch(mParameters,
3431                CAM_INTF_META_GEOMETRIC_STRENGTH,
3432                sizeof(geometricStrength), &geometricStrength);
3433    }
3434
3435    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3436        uint8_t hotPixelMode =
3437            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3438        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3439                sizeof(hotPixelMode), &hotPixelMode);
3440    }
3441
3442    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3443        float lensAperture =
3444            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3445        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3446                sizeof(lensAperture), &lensAperture);
3447    }
3448
3449    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3450        float filterDensity =
3451            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3452        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3453                sizeof(filterDensity), &filterDensity);
3454    }
3455
3456    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3457        float focalLength =
3458            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3459        rc = AddSetParmEntryToBatch(mParameters,
3460                CAM_INTF_META_LENS_FOCAL_LENGTH,
3461                sizeof(focalLength), &focalLength);
3462    }
3463
3464    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3465        uint8_t optStabMode =
3466            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3467        rc = AddSetParmEntryToBatch(mParameters,
3468                CAM_INTF_META_LENS_OPT_STAB_MODE,
3469                sizeof(optStabMode), &optStabMode);
3470    }
3471
3472    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3473        uint8_t noiseRedMode =
3474            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3475        rc = AddSetParmEntryToBatch(mParameters,
3476                CAM_INTF_META_NOISE_REDUCTION_MODE,
3477                sizeof(noiseRedMode), &noiseRedMode);
3478    }
3479
3480    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3481        uint8_t noiseRedStrength =
3482            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3483        rc = AddSetParmEntryToBatch(mParameters,
3484                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3485                sizeof(noiseRedStrength), &noiseRedStrength);
3486    }
3487
3488    cam_crop_region_t scalerCropRegion;
3489    bool scalerCropSet = false;
3490    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3491        scalerCropRegion.left =
3492            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3493        scalerCropRegion.top =
3494            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3495        scalerCropRegion.width =
3496            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3497        scalerCropRegion.height =
3498            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3499        rc = AddSetParmEntryToBatch(mParameters,
3500                CAM_INTF_META_SCALER_CROP_REGION,
3501                sizeof(scalerCropRegion), &scalerCropRegion);
3502        scalerCropSet = true;
3503    }
3504
3505    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3506        int64_t sensorExpTime =
3507            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3508        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3509        rc = AddSetParmEntryToBatch(mParameters,
3510                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3511                sizeof(sensorExpTime), &sensorExpTime);
3512    }
3513
3514    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3515        int64_t sensorFrameDuration =
3516            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3517        int64_t minFrameDuration = getMinFrameDuration(request);
3518        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3519        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3520            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3521        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3522        rc = AddSetParmEntryToBatch(mParameters,
3523                CAM_INTF_META_SENSOR_FRAME_DURATION,
3524                sizeof(sensorFrameDuration), &sensorFrameDuration);
3525    }
3526
3527    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3528        int32_t sensorSensitivity =
3529            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3530        if (sensorSensitivity <
3531                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3532            sensorSensitivity =
3533                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3534        if (sensorSensitivity >
3535                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3536            sensorSensitivity =
3537                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3538        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3539        rc = AddSetParmEntryToBatch(mParameters,
3540                CAM_INTF_META_SENSOR_SENSITIVITY,
3541                sizeof(sensorSensitivity), &sensorSensitivity);
3542    }
3543
3544    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3545        int32_t shadingMode =
3546            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3547        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3548                sizeof(shadingMode), &shadingMode);
3549    }
3550
3551    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3552        uint8_t shadingStrength =
3553            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3554        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3555                sizeof(shadingStrength), &shadingStrength);
3556    }
3557
3558    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3559        uint8_t fwk_facedetectMode =
3560            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3561        uint8_t facedetectMode =
3562            lookupHalName(FACEDETECT_MODES_MAP,
3563                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3564        rc = AddSetParmEntryToBatch(mParameters,
3565                CAM_INTF_META_STATS_FACEDETECT_MODE,
3566                sizeof(facedetectMode), &facedetectMode);
3567    }
3568
3569    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3570        uint8_t histogramMode =
3571            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3572        rc = AddSetParmEntryToBatch(mParameters,
3573                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3574                sizeof(histogramMode), &histogramMode);
3575    }
3576
3577    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3578        uint8_t sharpnessMapMode =
3579            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3580        rc = AddSetParmEntryToBatch(mParameters,
3581                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3582                sizeof(sharpnessMapMode), &sharpnessMapMode);
3583    }
3584
3585    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3586        uint8_t tonemapMode =
3587            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3588        rc = AddSetParmEntryToBatch(mParameters,
3589                CAM_INTF_META_TONEMAP_MODE,
3590                sizeof(tonemapMode), &tonemapMode);
3591    }
3592    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3593    /*All tonemap channels will have the same number of points*/
3594    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3595        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3596        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3597        cam_rgb_tonemap_curves tonemapCurves;
3598        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3599
3600        /* ch0 = G*/
3601        int point = 0;
3602        cam_tonemap_curve_t tonemapCurveGreen;
3603        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3604            for (int j = 0; j < 2; j++) {
3605               tonemapCurveGreen.tonemap_points[i][j] =
3606                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3607               point++;
3608            }
3609        }
3610        tonemapCurves.curves[0] = tonemapCurveGreen;
3611
3612        /* ch 1 = B */
3613        point = 0;
3614        cam_tonemap_curve_t tonemapCurveBlue;
3615        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3616            for (int j = 0; j < 2; j++) {
3617               tonemapCurveBlue.tonemap_points[i][j] =
3618                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3619               point++;
3620            }
3621        }
3622        tonemapCurves.curves[1] = tonemapCurveBlue;
3623
3624        /* ch 2 = R */
3625        point = 0;
3626        cam_tonemap_curve_t tonemapCurveRed;
3627        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3628            for (int j = 0; j < 2; j++) {
3629               tonemapCurveRed.tonemap_points[i][j] =
3630                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3631               point++;
3632            }
3633        }
3634        tonemapCurves.curves[2] = tonemapCurveRed;
3635
3636        rc = AddSetParmEntryToBatch(mParameters,
3637                CAM_INTF_META_TONEMAP_CURVES,
3638                sizeof(tonemapCurves), &tonemapCurves);
3639    }
3640
3641    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3642        uint8_t captureIntent =
3643            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3644        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3645                sizeof(captureIntent), &captureIntent);
3646    }
3647
3648    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3649        uint8_t blackLevelLock =
3650            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3651        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3652                sizeof(blackLevelLock), &blackLevelLock);
3653    }
3654
3655    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3656        uint8_t lensShadingMapMode =
3657            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3658        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3659                sizeof(lensShadingMapMode), &lensShadingMapMode);
3660    }
3661
3662    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3663        cam_area_t roi;
3664        bool reset = true;
3665        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3666        if (scalerCropSet) {
3667            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3668        }
3669        if (reset) {
3670            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3671                    sizeof(roi), &roi);
3672        }
3673    }
3674
3675    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3676        cam_area_t roi;
3677        bool reset = true;
3678        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3679        if (scalerCropSet) {
3680            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3681        }
3682        if (reset) {
3683            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3684                    sizeof(roi), &roi);
3685        }
3686    }
3687
3688    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3689        cam_area_t roi;
3690        bool reset = true;
3691        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3692        if (scalerCropSet) {
3693            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3694        }
3695        if (reset) {
3696            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3697                    sizeof(roi), &roi);
3698        }
3699    }
3700    return rc;
3701}
3702
3703/*===========================================================================
3704 * FUNCTION   : getJpegSettings
3705 *
3706 * DESCRIPTION: save the jpeg settings in the HAL
3707 *
3708 *
3709 * PARAMETERS :
3710 *   @settings  : frame settings information from framework
3711 *
3712 *
3713 * RETURN     : success: NO_ERROR
3714 *              failure:
3715 *==========================================================================*/
3716int QCamera3HardwareInterface::getJpegSettings
3717                                  (const camera_metadata_t *settings)
3718{
3719    if (mJpegSettings) {
3720        if (mJpegSettings->gps_timestamp) {
3721            free(mJpegSettings->gps_timestamp);
3722            mJpegSettings->gps_timestamp = NULL;
3723        }
3724        if (mJpegSettings->gps_coordinates) {
3725            for (int i = 0; i < 3; i++) {
3726                free(mJpegSettings->gps_coordinates[i]);
3727                mJpegSettings->gps_coordinates[i] = NULL;
3728            }
3729        }
3730        free(mJpegSettings);
3731        mJpegSettings = NULL;
3732    }
3733    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3734    CameraMetadata jpeg_settings;
3735    jpeg_settings = settings;
3736
3737    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3738        mJpegSettings->jpeg_orientation =
3739            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3740    } else {
3741        mJpegSettings->jpeg_orientation = 0;
3742    }
3743    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3744        mJpegSettings->jpeg_quality =
3745            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3746    } else {
3747        mJpegSettings->jpeg_quality = 85;
3748    }
3749    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3750        mJpegSettings->thumbnail_size.width =
3751            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3752        mJpegSettings->thumbnail_size.height =
3753            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3754    } else {
3755        mJpegSettings->thumbnail_size.width = 0;
3756        mJpegSettings->thumbnail_size.height = 0;
3757    }
3758    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3759        for (int i = 0; i < 3; i++) {
3760            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3761            *(mJpegSettings->gps_coordinates[i]) =
3762                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3763        }
3764    } else{
3765       for (int i = 0; i < 3; i++) {
3766            mJpegSettings->gps_coordinates[i] = NULL;
3767        }
3768    }
3769
3770    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3771        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3772        *(mJpegSettings->gps_timestamp) =
3773            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3774    } else {
3775        mJpegSettings->gps_timestamp = NULL;
3776    }
3777
3778    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3779        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3780        for (int i = 0; i < len; i++) {
3781            mJpegSettings->gps_processing_method[i] =
3782                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3783        }
3784        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3785            mJpegSettings->gps_processing_method[len] = '\0';
3786        }
3787    } else {
3788        mJpegSettings->gps_processing_method[0] = '\0';
3789    }
3790
3791    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3792        mJpegSettings->sensor_sensitivity =
3793            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3794    } else {
3795        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3796    }
3797
3798    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3799
3800    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3801        mJpegSettings->lens_focal_length =
3802            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3803    }
3804    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3805        mJpegSettings->exposure_compensation =
3806            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3807    }
3808    mJpegSettings->sharpness = 10; //default value
3809    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3810        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3811        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3812            mJpegSettings->sharpness = 0;
3813        }
3814    }
3815    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3816    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3817    mJpegSettings->is_jpeg_format = true;
3818    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3819    return 0;
3820}
3821
3822/*===========================================================================
3823 * FUNCTION   : captureResultCb
3824 *
3825 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3826 *
3827 * PARAMETERS :
3828 *   @frame  : frame information from mm-camera-interface
3829 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3830 *   @userdata: userdata
3831 *
3832 * RETURN     : NONE
3833 *==========================================================================*/
3834void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3835                camera3_stream_buffer_t *buffer,
3836                uint32_t frame_number, void *userdata)
3837{
3838    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3839    if (hw == NULL) {
3840        ALOGE("%s: Invalid hw %p", __func__, hw);
3841        return;
3842    }
3843
3844    hw->captureResultCb(metadata, buffer, frame_number);
3845    return;
3846}
3847
3848
3849/*===========================================================================
3850 * FUNCTION   : initialize
3851 *
3852 * DESCRIPTION: Pass framework callback pointers to HAL
3853 *
3854 * PARAMETERS :
3855 *
3856 *
3857 * RETURN     : Success : 0
3858 *              Failure: -ENODEV
3859 *==========================================================================*/
3860
3861int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3862                                  const camera3_callback_ops_t *callback_ops)
3863{
3864    ALOGV("%s: E", __func__);
3865    QCamera3HardwareInterface *hw =
3866        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3867    if (!hw) {
3868        ALOGE("%s: NULL camera device", __func__);
3869        return -ENODEV;
3870    }
3871
3872    int rc = hw->initialize(callback_ops);
3873    ALOGV("%s: X", __func__);
3874    return rc;
3875}
3876
3877/*===========================================================================
3878 * FUNCTION   : configure_streams
3879 *
3880 * DESCRIPTION:
3881 *
3882 * PARAMETERS :
3883 *
3884 *
3885 * RETURN     : Success: 0
3886 *              Failure: -EINVAL (if stream configuration is invalid)
3887 *                       -ENODEV (fatal error)
3888 *==========================================================================*/
3889
3890int QCamera3HardwareInterface::configure_streams(
3891        const struct camera3_device *device,
3892        camera3_stream_configuration_t *stream_list)
3893{
3894    ALOGV("%s: E", __func__);
3895    QCamera3HardwareInterface *hw =
3896        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3897    if (!hw) {
3898        ALOGE("%s: NULL camera device", __func__);
3899        return -ENODEV;
3900    }
3901    int rc = hw->configureStreams(stream_list);
3902    ALOGV("%s: X", __func__);
3903    return rc;
3904}
3905
3906/*===========================================================================
3907 * FUNCTION   : register_stream_buffers
3908 *
3909 * DESCRIPTION: Register stream buffers with the device
3910 *
3911 * PARAMETERS :
3912 *
3913 * RETURN     :
3914 *==========================================================================*/
3915int QCamera3HardwareInterface::register_stream_buffers(
3916        const struct camera3_device *device,
3917        const camera3_stream_buffer_set_t *buffer_set)
3918{
3919    ALOGV("%s: E", __func__);
3920    QCamera3HardwareInterface *hw =
3921        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3922    if (!hw) {
3923        ALOGE("%s: NULL camera device", __func__);
3924        return -ENODEV;
3925    }
3926    int rc = hw->registerStreamBuffers(buffer_set);
3927    ALOGV("%s: X", __func__);
3928    return rc;
3929}
3930
3931/*===========================================================================
3932 * FUNCTION   : construct_default_request_settings
3933 *
3934 * DESCRIPTION: Configure a settings buffer to meet the required use case
3935 *
3936 * PARAMETERS :
3937 *
3938 *
3939 * RETURN     : Success: Return valid metadata
3940 *              Failure: Return NULL
3941 *==========================================================================*/
3942const camera_metadata_t* QCamera3HardwareInterface::
3943    construct_default_request_settings(const struct camera3_device *device,
3944                                        int type)
3945{
3946
3947    ALOGV("%s: E", __func__);
3948    camera_metadata_t* fwk_metadata = NULL;
3949    QCamera3HardwareInterface *hw =
3950        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3951    if (!hw) {
3952        ALOGE("%s: NULL camera device", __func__);
3953        return NULL;
3954    }
3955
3956    fwk_metadata = hw->translateCapabilityToMetadata(type);
3957
3958    ALOGV("%s: X", __func__);
3959    return fwk_metadata;
3960}
3961
3962/*===========================================================================
3963 * FUNCTION   : process_capture_request
3964 *
3965 * DESCRIPTION:
3966 *
3967 * PARAMETERS :
3968 *
3969 *
3970 * RETURN     :
3971 *==========================================================================*/
3972int QCamera3HardwareInterface::process_capture_request(
3973                    const struct camera3_device *device,
3974                    camera3_capture_request_t *request)
3975{
3976    ALOGV("%s: E", __func__);
3977    QCamera3HardwareInterface *hw =
3978        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3979    if (!hw) {
3980        ALOGE("%s: NULL camera device", __func__);
3981        return -EINVAL;
3982    }
3983
3984    int rc = hw->processCaptureRequest(request);
3985    ALOGV("%s: X", __func__);
3986    return rc;
3987}
3988
3989/*===========================================================================
3990 * FUNCTION   : get_metadata_vendor_tag_ops
3991 *
3992 * DESCRIPTION:
3993 *
3994 * PARAMETERS :
3995 *
3996 *
3997 * RETURN     :
3998 *==========================================================================*/
3999
4000void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
4001                const struct camera3_device *device,
4002                vendor_tag_query_ops_t* ops)
4003{
4004    ALOGV("%s: E", __func__);
4005    QCamera3HardwareInterface *hw =
4006        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4007    if (!hw) {
4008        ALOGE("%s: NULL camera device", __func__);
4009        return;
4010    }
4011
4012    hw->getMetadataVendorTagOps(ops);
4013    ALOGV("%s: X", __func__);
4014    return;
4015}
4016
4017/*===========================================================================
4018 * FUNCTION   : dump
4019 *
4020 * DESCRIPTION:
4021 *
4022 * PARAMETERS :
4023 *
4024 *
4025 * RETURN     :
4026 *==========================================================================*/
4027
4028void QCamera3HardwareInterface::dump(
4029                const struct camera3_device *device, int fd)
4030{
4031    ALOGV("%s: E", __func__);
4032    QCamera3HardwareInterface *hw =
4033        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4034    if (!hw) {
4035        ALOGE("%s: NULL camera device", __func__);
4036        return;
4037    }
4038
4039    hw->dump(fd);
4040    ALOGV("%s: X", __func__);
4041    return;
4042}
4043
4044/*===========================================================================
4045 * FUNCTION   : flush
4046 *
4047 * DESCRIPTION:
4048 *
4049 * PARAMETERS :
4050 *
4051 *
4052 * RETURN     :
4053 *==========================================================================*/
4054
4055int QCamera3HardwareInterface::flush(
4056                const struct camera3_device *device)
4057{
4058    int rc;
4059    ALOGV("%s: E", __func__);
4060    QCamera3HardwareInterface *hw =
4061        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4062    if (!hw) {
4063        ALOGE("%s: NULL camera device", __func__);
4064        return -EINVAL;
4065    }
4066
4067    rc = hw->flush();
4068    ALOGV("%s: X", __func__);
4069    return rc;
4070}
4071
4072/*===========================================================================
4073 * FUNCTION   : close_camera_device
4074 *
4075 * DESCRIPTION:
4076 *
4077 * PARAMETERS :
4078 *
4079 *
4080 * RETURN     :
4081 *==========================================================================*/
4082int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4083{
4084    ALOGV("%s: E", __func__);
4085    int ret = NO_ERROR;
4086    QCamera3HardwareInterface *hw =
4087        reinterpret_cast<QCamera3HardwareInterface *>(
4088            reinterpret_cast<camera3_device_t *>(device)->priv);
4089    if (!hw) {
4090        ALOGE("NULL camera device");
4091        return BAD_VALUE;
4092    }
4093    delete hw;
4094
4095    pthread_mutex_lock(&mCameraSessionLock);
4096    mCameraSessionActive = 0;
4097    pthread_mutex_unlock(&mCameraSessionLock);
4098    ALOGV("%s: X", __func__);
4099    return ret;
4100}
4101
4102/*===========================================================================
4103 * FUNCTION   : getWaveletDenoiseProcessPlate
4104 *
4105 * DESCRIPTION: query wavelet denoise process plate
4106 *
4107 * PARAMETERS : None
4108 *
4109 * RETURN     : WNR prcocess plate vlaue
4110 *==========================================================================*/
4111cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4112{
4113    char prop[PROPERTY_VALUE_MAX];
4114    memset(prop, 0, sizeof(prop));
4115    property_get("persist.denoise.process.plates", prop, "0");
4116    int processPlate = atoi(prop);
4117    switch(processPlate) {
4118    case 0:
4119        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4120    case 1:
4121        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4122    case 2:
4123        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4124    case 3:
4125        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4126    default:
4127        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4128    }
4129}
4130
4131/*===========================================================================
4132 * FUNCTION   : needRotationReprocess
4133 *
4134 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4135 *
4136 * PARAMETERS : none
4137 *
4138 * RETURN     : true: needed
4139 *              false: no need
4140 *==========================================================================*/
4141bool QCamera3HardwareInterface::needRotationReprocess()
4142{
4143
4144    if (!mJpegSettings->is_jpeg_format) {
4145        // RAW image, no need to reprocess
4146        return false;
4147    }
4148
4149    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4150        mJpegSettings->jpeg_orientation > 0) {
4151        // current rotation is not zero, and pp has the capability to process rotation
4152        ALOGD("%s: need do reprocess for rotation", __func__);
4153        return true;
4154    }
4155
4156    return false;
4157}
4158
4159/*===========================================================================
4160 * FUNCTION   : needReprocess
4161 *
4162 * DESCRIPTION: if reprocess in needed
4163 *
4164 * PARAMETERS : none
4165 *
4166 * RETURN     : true: needed
4167 *              false: no need
4168 *==========================================================================*/
4169bool QCamera3HardwareInterface::needReprocess()
4170{
4171    if (!mJpegSettings->is_jpeg_format) {
4172        // RAW image, no need to reprocess
4173        return false;
4174    }
4175
4176    if ((mJpegSettings->min_required_pp_mask > 0) ||
4177         isWNREnabled()) {
4178        // TODO: add for ZSL HDR later
4179        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4180        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4181        return true;
4182    }
4183    return needRotationReprocess();
4184}
4185
4186/*===========================================================================
4187 * FUNCTION   : addOnlineReprocChannel
4188 *
4189 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4190 *              coming from input channel
4191 *
4192 * PARAMETERS :
4193 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4194 *
4195 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4196 *==========================================================================*/
4197QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4198              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4199{
4200    int32_t rc = NO_ERROR;
4201    QCamera3ReprocessChannel *pChannel = NULL;
4202    if (pInputChannel == NULL) {
4203        ALOGE("%s: input channel obj is NULL", __func__);
4204        return NULL;
4205    }
4206
4207    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4208            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4209    if (NULL == pChannel) {
4210        ALOGE("%s: no mem for reprocess channel", __func__);
4211        return NULL;
4212    }
4213
4214    // Capture channel, only need snapshot and postview streams start together
4215    mm_camera_channel_attr_t attr;
4216    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4217    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4218    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4219    rc = pChannel->initialize();
4220    if (rc != NO_ERROR) {
4221        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4222        delete pChannel;
4223        return NULL;
4224    }
4225
4226    // pp feature config
4227    cam_pp_feature_config_t pp_config;
4228    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4229    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4230        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4231        pp_config.sharpness = mJpegSettings->sharpness;
4232    }
4233
4234    if (isWNREnabled()) {
4235        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4236        pp_config.denoise2d.denoise_enable = 1;
4237        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4238    }
4239    if (needRotationReprocess()) {
4240        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4241        int rotation = mJpegSettings->jpeg_orientation;
4242        if (rotation == 0) {
4243            pp_config.rotation = ROTATE_0;
4244        } else if (rotation == 90) {
4245            pp_config.rotation = ROTATE_90;
4246        } else if (rotation == 180) {
4247            pp_config.rotation = ROTATE_180;
4248        } else if (rotation == 270) {
4249            pp_config.rotation = ROTATE_270;
4250        }
4251    }
4252
4253   rc = pChannel->addReprocStreamsFromSource(pp_config,
4254                                             pInputChannel,
4255                                             mMetadataChannel);
4256
4257    if (rc != NO_ERROR) {
4258        delete pChannel;
4259        return NULL;
4260    }
4261    return pChannel;
4262}
4263
// Max unmatched frames tolerated in a channel queue, taken from the
// sensor capability's minimum post-processing buffer count.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4268
// Whether wavelet noise reduction is supported per the camera capability.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4272
4273}; //end namespace qcamera
4274