QCamera3HWI.cpp revision 180e645f4b69b506721f0d3ed1b69cb250265860
1/* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Convenience accessor for a buffer pointer inside a QCamera3Memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables, filled in before the HAL device is opened
// (the constructor dereferences gCamCapability[cameraId] without a check).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Last parameter batch applied to the backend.
parm_buffer_t *prevSettings;
// Cached static metadata handed to the framework, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: only one camera session may be open at a time
// (see openCamera(hw_device), which returns -EUSERS on a second open).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
/* Translation table: ANDROID_CONTROL_EFFECT_MODE_* -> mm-camera
 * CAM_EFFECT_MODE_* values. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
72
/* Translation table: ANDROID_CONTROL_AWB_MODE_* -> mm-camera
 * CAM_WB_MODE_* values. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
84
/* Translation table: ANDROID_CONTROL_SCENE_MODE_* -> mm-camera
 * CAM_SCENE_MODE_* values.  FACE_PRIORITY deliberately maps to
 * CAM_SCENE_MODE_OFF: face priority is handled via face detection,
 * not a backend scene mode. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
103
/* Translation table: ANDROID_CONTROL_AF_MODE_* -> mm-camera
 * CAM_FOCUS_MODE_* values.  AF_MODE_OFF maps to CAM_FOCUS_MODE_FIXED
 * (presumably the backend has no explicit "off" mode — TODO confirm). */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
112
/* Translation table: ANDROID_CONTROL_AE_ANTIBANDING_MODE_* -> mm-camera
 * CAM_ANTIBANDING_MODE_* values. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
119
/* Translation table: ANDROID_CONTROL_AE_MODE_* -> the flash mode the
 * backend should run.  Plain AE ON maps to flash OFF (AE without flash);
 * the REDEYE variant maps to AUTO since there is no dedicated backend
 * red-eye flash mode in this table. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
127
/* Translation table: ANDROID_FLASH_MODE_* -> mm-camera CAM_FLASH_MODE_*. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
133
/* Translation table: ANDROID_STATISTICS_FACE_DETECT_MODE_* -> mm-camera
 * face detect modes.  SIMPLE mode is intentionally absent from this map. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
138
/* Translation table: ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_* ->
 * mm-camera focus calibration quality values. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
147
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the
// leading (0, 0) entry means "no thumbnail", as required by the
// ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES static metadata contract.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};
155
/* Translation table: ANDROID_SENSOR_TEST_PATTERN_MODE_* -> mm-camera
 * CAM_TEST_PATTERN_* values. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
163
// camera3 HAL entry points handed to the framework (uses the GNU
// labeled-initializer extension, not C99 designated initializers).
// NOTE(review): the constructor advertises CAMERA_DEVICE_API_VERSION_3_2
// while register_stream_buffers is still wired up; HAL 3.2 deprecates that
// callback — confirm the framework version this HAL targets.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};

// Presumably caps the number of capture requests the HAL keeps in flight
// at once — confirm against process_capture_request's pacing logic.
int QCamera3HardwareInterface::kMaxInFlight = 5;
177
178/*===========================================================================
179 * FUNCTION   : QCamera3HardwareInterface
180 *
181 * DESCRIPTION: constructor of QCamera3HardwareInterface
182 *
183 * PARAMETERS :
184 *   @cameraId  : camera ID
185 *
186 * RETURN     : none
187 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mHdrHint(false)
{
    // Fill in the camera3_device_t the framework talks to; 'priv' carries
    // this instance so the static ops can recover it.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): assumes gCamCapability[cameraId] was populated before
    // construction — no NULL/bounds check here.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request bookkeeping shared with the capture path.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; clear the cache slots.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL is optional; failure to load it is logged, not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
232
233/*===========================================================================
234 * FUNCTION   : ~QCamera3HardwareInterface
235 *
236 * DESCRIPTION: destructor of QCamera3HardwareInterface
237 *
238 * PARAMETERS : none
239 *
240 * RETURN     : none
241 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */
        /*flush the metadata list*/
    // Return cached metadata buffers to the metadata channel and release
    // their bookkeeping entries (erase() already yields the next iterator,
    // so there is no iterator increment in the for-header).
    if (!mStoredMetadataList.empty()) {
        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
              m != mStoredMetadataList.end(); ) {
            mMetadataChannel->bufDone(m->meta_buf);
            free(m->meta_buf);
            m = mStoredMetadataList.erase(m);
        }
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    // First pass: stop every channel before destroying any of them, so no
    // stream callback fires into a half-torn-down object.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }

    // Second pass: free the registered buffer-handle arrays, the channels,
    // and the malloc'd stream_info_t records themselves.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if ((*it)->registered && (*it)->buffer_set.buffers) {
             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
        }
        if (channel)
            delete channel;
        free (*it);
    }

    // mPictureChannel was owned via mStreamInfo above; just drop the alias.
    mPictureChannel = NULL;

    if (mJpegSettings != NULL) {
        free(mJpegSettings);
        mJpegSettings = NULL;
    }

    /* Clean up all channels */
    // Parameter heap/metadata channel only exist once initialize() ran.
    if (mCameraInitialized) {
        if (mMetadataChannel) {
            mMetadataChannel->stop();
            delete mMetadataChannel;
            mMetadataChannel = NULL;
        }
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingRequestsList.clear();

    // Free any lazily-built default request templates.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}
309
310/*===========================================================================
311 * FUNCTION   : openCamera
312 *
313 * DESCRIPTION: open camera
314 *
315 * PARAMETERS :
316 *   @hw_device  : double ptr for camera device struct
317 *
318 * RETURN     : int32_t type of status
319 *              NO_ERROR  -- success
320 *              none-zero failure code
321 *==========================================================================*/
322int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
323{
324    int rc = 0;
325    pthread_mutex_lock(&mCameraSessionLock);
326    if (mCameraSessionActive) {
327        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
328        pthread_mutex_unlock(&mCameraSessionLock);
329        return -EUSERS;
330    }
331
332    if (mCameraOpened) {
333        *hw_device = NULL;
334        return PERMISSION_DENIED;
335    }
336
337    rc = openCamera();
338    if (rc == 0) {
339        *hw_device = &mCameraDevice.common;
340        mCameraSessionActive = 1;
341    } else
342        *hw_device = NULL;
343
344#ifdef HAS_MULTIMEDIA_HINTS
345    if (rc == 0) {
346        if (m_pPowerModule) {
347            if (m_pPowerModule->powerHint) {
348                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
349                        (void *)"state=1");
350            }
351        }
352    }
353#endif
354    pthread_mutex_unlock(&mCameraSessionLock);
355    return rc;
356}
357
358/*===========================================================================
359 * FUNCTION   : openCamera
360 *
361 * DESCRIPTION: open camera
362 *
363 * PARAMETERS : none
364 *
365 * RETURN     : int32_t type of status
366 *              NO_ERROR  -- success
367 *              none-zero failure code
368 *==========================================================================*/
369int QCamera3HardwareInterface::openCamera()
370{
371    if (mCameraHandle) {
372        ALOGE("Failure: Camera already opened");
373        return ALREADY_EXISTS;
374    }
375    mCameraHandle = camera_open(mCameraId);
376    if (!mCameraHandle) {
377        ALOGE("camera_open failed.");
378        return UNKNOWN_ERROR;
379    }
380
381    mCameraOpened = true;
382
383    return NO_ERROR;
384}
385
386/*===========================================================================
387 * FUNCTION   : closeCamera
388 *
389 * DESCRIPTION: close camera
390 *
391 * PARAMETERS : none
392 *
393 * RETURN     : int32_t type of status
394 *              NO_ERROR  -- success
395 *              none-zero failure code
396 *==========================================================================*/
397int QCamera3HardwareInterface::closeCamera()
398{
399    int rc = NO_ERROR;
400
401    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
402    mCameraHandle = NULL;
403    mCameraOpened = false;
404
405#ifdef HAS_MULTIMEDIA_HINTS
406    if (rc == NO_ERROR) {
407        if (m_pPowerModule) {
408            if (m_pPowerModule->powerHint) {
409                if(mHdrHint == true) {
410                    m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
411                            (void *)"state=3");
412                    mHdrHint = false;
413                }
414                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
415                        (void *)"state=0");
416            }
417        }
418    }
419#endif
420
421    return rc;
422}
423
424/*===========================================================================
425 * FUNCTION   : initialize
426 *
427 * DESCRIPTION: Initialize frameworks callback functions
428 *
429 * PARAMETERS :
430 *   @callback_ops : callback function to frameworks
431 *
432 * RETURN     :
433 *
434 *==========================================================================*/
435int QCamera3HardwareInterface::initialize(
436        const struct camera3_callback_ops *callback_ops)
437{
438    int rc;
439
440    pthread_mutex_lock(&mMutex);
441
442    rc = initParameters();
443    if (rc < 0) {
444        ALOGE("%s: initParamters failed %d", __func__, rc);
445       goto err1;
446    }
447    mCallbackOps = callback_ops;
448
449    pthread_mutex_unlock(&mMutex);
450    mCameraInitialized = true;
451    return 0;
452
453err1:
454    pthread_mutex_unlock(&mMutex);
455    return rc;
456}
457
458/*===========================================================================
459 * FUNCTION   : configureStreams
460 *
461 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
462 *              and output streams.
463 *
464 * PARAMETERS :
465 *   @stream_list : streams to be configured
466 *
467 * RETURN     :
468 *
469 *==========================================================================*/
470int QCamera3HardwareInterface::configureStreams(
471        camera3_stream_configuration_t *streamList)
472{
473    int rc = 0;
474    mIsZslMode = false;
475
476    // Sanity check stream_list
477    if (streamList == NULL) {
478        ALOGE("%s: NULL stream configuration", __func__);
479        return BAD_VALUE;
480    }
481    if (streamList->streams == NULL) {
482        ALOGE("%s: NULL stream list", __func__);
483        return BAD_VALUE;
484    }
485
486    if (streamList->num_streams < 1) {
487        ALOGE("%s: Bad number of streams requested: %d", __func__,
488                streamList->num_streams);
489        return BAD_VALUE;
490    }
491
492    /* first invalidate all the steams in the mStreamList
493     * if they appear again, they will be validated */
494    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
495            it != mStreamInfo.end(); it++) {
496        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
497        channel->stop();
498        (*it)->status = INVALID;
499    }
500    if (mMetadataChannel) {
501        /* If content of mStreamInfo is not 0, there is metadata stream */
502        mMetadataChannel->stop();
503    }
504
505#ifdef HAS_MULTIMEDIA_HINTS
506    if(mHdrHint == true) {
507        if (m_pPowerModule) {
508            if (m_pPowerModule->powerHint) {
509                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
510                        (void *)"state=3");
511                mHdrHint = false;
512            }
513        }
514    }
515#endif
516
517    pthread_mutex_lock(&mMutex);
518
519    camera3_stream_t *inputStream = NULL;
520    camera3_stream_t *jpegStream = NULL;
521    cam_stream_size_info_t stream_config_info;
522
523    for (size_t i = 0; i < streamList->num_streams; i++) {
524        camera3_stream_t *newStream = streamList->streams[i];
525        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
526                __func__, newStream->stream_type, newStream->format,
527                 newStream->width, newStream->height);
528        //if the stream is in the mStreamList validate it
529        bool stream_exists = false;
530        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
531                it != mStreamInfo.end(); it++) {
532            if ((*it)->stream == newStream) {
533                QCamera3Channel *channel =
534                    (QCamera3Channel*)(*it)->stream->priv;
535                stream_exists = true;
536                (*it)->status = RECONFIGURE;
537                /*delete the channel object associated with the stream because
538                  we need to reconfigure*/
539                delete channel;
540                (*it)->stream->priv = NULL;
541                (*it)->channel = NULL;
542            }
543        }
544        if (!stream_exists) {
545            //new stream
546            stream_info_t* stream_info;
547            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
548            stream_info->stream = newStream;
549            stream_info->status = VALID;
550            stream_info->registered = 0;
551            stream_info->channel = NULL;
552            mStreamInfo.push_back(stream_info);
553        }
554        if (newStream->stream_type == CAMERA3_STREAM_INPUT
555                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
556            if (inputStream != NULL) {
557                ALOGE("%s: Multiple input streams requested!", __func__);
558                pthread_mutex_unlock(&mMutex);
559                return BAD_VALUE;
560            }
561            inputStream = newStream;
562        }
563        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
564            jpegStream = newStream;
565        }
566    }
567    mInputStream = inputStream;
568
569    /*clean up invalid streams*/
570    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
571            it != mStreamInfo.end();) {
572        if(((*it)->status) == INVALID){
573            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
574            delete channel;
575            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
576            free(*it);
577            it = mStreamInfo.erase(it);
578        } else {
579            it++;
580        }
581    }
582    if (mMetadataChannel) {
583        delete mMetadataChannel;
584        mMetadataChannel = NULL;
585    }
586
587    //Create metadata channel and initialize it
588    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
589                    mCameraHandle->ops, captureResultCb,
590                    &gCamCapability[mCameraId]->padding_info, this);
591    if (mMetadataChannel == NULL) {
592        ALOGE("%s: failed to allocate metadata channel", __func__);
593        rc = -ENOMEM;
594        pthread_mutex_unlock(&mMutex);
595        return rc;
596    }
597    rc = mMetadataChannel->initialize();
598    if (rc < 0) {
599        ALOGE("%s: metadata channel initialization failed", __func__);
600        delete mMetadataChannel;
601        mMetadataChannel = NULL;
602        pthread_mutex_unlock(&mMutex);
603        return rc;
604    }
605
606    /* Allocate channel objects for the requested streams */
607    for (size_t i = 0; i < streamList->num_streams; i++) {
608        camera3_stream_t *newStream = streamList->streams[i];
609        uint32_t stream_usage = newStream->usage;
610        stream_config_info.stream_sizes[i].width = newStream->width;
611        stream_config_info.stream_sizes[i].height = newStream->height;
612        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
613            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
614            //for zsl stream the size is jpeg size
615            stream_config_info.stream_sizes[i].width = jpegStream->width;
616            stream_config_info.stream_sizes[i].height = jpegStream->height;
617            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
618        } else {
619           //for non zsl streams find out the format
620           switch (newStream->format) {
621           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
622              {
623                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
624                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
625                 } else {
626                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
627                 }
628              }
629              break;
630           case HAL_PIXEL_FORMAT_YCbCr_420_888:
631              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
632#ifdef HAS_MULTIMEDIA_HINTS
633              if (m_pPowerModule) {
634                  if (m_pPowerModule->powerHint) {
635                      m_pPowerModule->powerHint(m_pPowerModule,
636                          POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
637                      mHdrHint = true;
638                  }
639              }
640#endif
641              break;
642           case HAL_PIXEL_FORMAT_BLOB:
643              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
644              break;
645           default:
646              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
647              break;
648           }
649        }
650        if (newStream->priv == NULL) {
651            //New stream, construct channel
652            switch (newStream->stream_type) {
653            case CAMERA3_STREAM_INPUT:
654                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
655                break;
656            case CAMERA3_STREAM_BIDIRECTIONAL:
657                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
658                    GRALLOC_USAGE_HW_CAMERA_WRITE;
659                break;
660            case CAMERA3_STREAM_OUTPUT:
661                /* For video encoding stream, set read/write rarely
662                 * flag so that they may be set to un-cached */
663                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
664                    newStream->usage =
665                         (GRALLOC_USAGE_SW_READ_RARELY |
666                         GRALLOC_USAGE_SW_WRITE_RARELY |
667                         GRALLOC_USAGE_HW_CAMERA_WRITE);
668                else
669                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
670                break;
671            default:
672                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
673                break;
674            }
675
676            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
677                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
678                QCamera3Channel *channel;
679                switch (newStream->format) {
680                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
681                case HAL_PIXEL_FORMAT_YCbCr_420_888:
682                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
683                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
684                        jpegStream) {
685                        uint32_t width = jpegStream->width;
686                        uint32_t height = jpegStream->height;
687                        mIsZslMode = true;
688                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
689                            mCameraHandle->ops, captureResultCb,
690                            &gCamCapability[mCameraId]->padding_info, this, newStream,
691                            width, height);
692                    } else
693                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
694                            mCameraHandle->ops, captureResultCb,
695                            &gCamCapability[mCameraId]->padding_info, this, newStream);
696                    if (channel == NULL) {
697                        ALOGE("%s: allocation of channel failed", __func__);
698                        pthread_mutex_unlock(&mMutex);
699                        return -ENOMEM;
700                    }
701
702                    newStream->priv = channel;
703                    break;
704                case HAL_PIXEL_FORMAT_BLOB:
705                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
706                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
707                            mCameraHandle->ops, captureResultCb,
708                            &gCamCapability[mCameraId]->padding_info, this, newStream);
709                    if (mPictureChannel == NULL) {
710                        ALOGE("%s: allocation of channel failed", __func__);
711                        pthread_mutex_unlock(&mMutex);
712                        return -ENOMEM;
713                    }
714                    newStream->priv = (QCamera3Channel*)mPictureChannel;
715                    break;
716
717                //TODO: Add support for app consumed format?
718                default:
719                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
720                    break;
721                }
722            }
723
724            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
725                    it != mStreamInfo.end(); it++) {
726                if ((*it)->stream == newStream) {
727                    (*it)->channel = (QCamera3Channel*) newStream->priv;
728                    break;
729                }
730            }
731        } else {
732            // Channel already exists for this stream
733            // Do nothing for now
734        }
735    }
736
737    int32_t hal_version = CAM_HAL_V3;
738    stream_config_info.num_streams = streamList->num_streams;
739
740    // settings/parameters don't carry over for new configureStreams
741    memset(mParameters, 0, sizeof(parm_buffer_t));
742
743    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
744    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
745                sizeof(hal_version), &hal_version);
746
747    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
748                sizeof(stream_config_info), &stream_config_info);
749
750    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
751
752    /*For the streams to be reconfigured we need to register the buffers
753      since the framework wont*/
754    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
755            it != mStreamInfo.end(); it++) {
756        if ((*it)->status == RECONFIGURE) {
757            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
758            /*only register buffers for streams that have already been
759              registered*/
760            if ((*it)->registered) {
761                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
762                        (*it)->buffer_set.buffers);
763                if (rc != NO_ERROR) {
764                    ALOGE("%s: Failed to register the buffers of old stream,\
765                            rc = %d", __func__, rc);
766                }
767                ALOGV("%s: channel %p has %d buffers",
768                        __func__, channel, (*it)->buffer_set.num_buffers);
769            }
770        }
771    }
772
    /* Initialize mPendingRequestsList and mPendingBuffersMap */
774    mPendingRequestsList.clear();
775    mPendingFrameDropList.clear();
776    // Initialize/Reset the pending buffers list
777    mPendingBuffersMap.num_buffers = 0;
778    mPendingBuffersMap.mPendingBufferList.clear();
779
780    /*flush the metadata list*/
781    if (!mStoredMetadataList.empty()) {
782        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
783              m != mStoredMetadataList.end(); m++) {
784            mMetadataChannel->bufDone(m->meta_buf);
785            free(m->meta_buf);
786            m = mStoredMetadataList.erase(m);
787        }
788    }
789
790    mFirstRequest = true;
791
792    //Get min frame duration for this streams configuration
793    deriveMinFrameDuration();
794
795    pthread_mutex_unlock(&mMutex);
796    return rc;
797}
798
799/*===========================================================================
800 * FUNCTION   : validateCaptureRequest
801 *
802 * DESCRIPTION: validate a capture request from camera service
803 *
804 * PARAMETERS :
805 *   @request : request from framework to process
806 *
807 * RETURN     :
808 *
809 *==========================================================================*/
810int QCamera3HardwareInterface::validateCaptureRequest(
811                    camera3_capture_request_t *request)
812{
813    ssize_t idx = 0;
814    const camera3_stream_buffer_t *b;
815    CameraMetadata meta;
816
817    /* Sanity check the request */
818    if (request == NULL) {
819        ALOGE("%s: NULL capture request", __func__);
820        return BAD_VALUE;
821    }
822
823    uint32_t frameNumber = request->frame_number;
824    if (request->input_buffer != NULL &&
825            request->input_buffer->stream != mInputStream) {
826        ALOGE("%s: Request %d: Input buffer not from input stream!",
827                __FUNCTION__, frameNumber);
828        return BAD_VALUE;
829    }
830    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
831        ALOGE("%s: Request %d: No output buffers provided!",
832                __FUNCTION__, frameNumber);
833        return BAD_VALUE;
834    }
835    if (request->input_buffer != NULL) {
836        b = request->input_buffer;
837        QCamera3Channel *channel =
838            static_cast<QCamera3Channel*>(b->stream->priv);
839        if (channel == NULL) {
840            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
841                    __func__, frameNumber, idx);
842            return BAD_VALUE;
843        }
844        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
845            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
846                    __func__, frameNumber, idx);
847            return BAD_VALUE;
848        }
849        if (b->release_fence != -1) {
850            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
851                    __func__, frameNumber, idx);
852            return BAD_VALUE;
853        }
854        if (b->buffer == NULL) {
855            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
856                    __func__, frameNumber, idx);
857            return BAD_VALUE;
858        }
859    }
860
861    // Validate all buffers
862    b = request->output_buffers;
863    do {
864        QCamera3Channel *channel =
865                static_cast<QCamera3Channel*>(b->stream->priv);
866        if (channel == NULL) {
867            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
868                    __func__, frameNumber, idx);
869            return BAD_VALUE;
870        }
871        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
872            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
873                    __func__, frameNumber, idx);
874            return BAD_VALUE;
875        }
876        if (b->release_fence != -1) {
877            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
878                    __func__, frameNumber, idx);
879            return BAD_VALUE;
880        }
881        if (b->buffer == NULL) {
882            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
883                    __func__, frameNumber, idx);
884            return BAD_VALUE;
885        }
886        idx++;
887        b = request->output_buffers + idx;
888    } while (idx < (ssize_t)request->num_output_buffers);
889
890    return NO_ERROR;
891}
892
893/*===========================================================================
894 * FUNCTION   : deriveMinFrameDuration
895 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
897 *              on currently configured streams.
898 *
899 * PARAMETERS : NONE
900 *
901 * RETURN     : NONE
902 *
903 *==========================================================================*/
904void QCamera3HardwareInterface::deriveMinFrameDuration()
905{
906    int32_t maxJpegDimension, maxProcessedDimension;
907
908    maxJpegDimension = 0;
909    maxProcessedDimension = 0;
910
911    // Figure out maximum jpeg, processed, and raw dimensions
912    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
913        it != mStreamInfo.end(); it++) {
914
915        // Input stream doesn't have valid stream_type
916        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
917            continue;
918
919        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
920        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
921            if (dimension > maxJpegDimension)
922                maxJpegDimension = dimension;
923        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
924            if (dimension > maxProcessedDimension)
925                maxProcessedDimension = dimension;
926        }
927    }
928
929    //Assume all jpeg dimensions are in processed dimensions.
930    if (maxJpegDimension > maxProcessedDimension)
931        maxProcessedDimension = maxJpegDimension;
932
933    //Find minimum durations for processed, jpeg, and raw
934    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
935    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
936        if (maxProcessedDimension ==
937            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
938            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
939            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
940            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
941            break;
942        }
943    }
944}
945
946/*===========================================================================
947 * FUNCTION   : getMinFrameDuration
948 *
949 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
950 *              and current request configuration.
951 *
952 * PARAMETERS : @request: requset sent by the frameworks
953 *
954 * RETURN     : min farme duration for a particular request
955 *
956 *==========================================================================*/
957int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
958{
959    bool hasJpegStream = false;
960    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
961        const camera3_stream_t *stream = request->output_buffers[i].stream;
962        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
963            hasJpegStream = true;
964    }
965
966    if (!hasJpegStream)
967        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
968    else
969        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
970}
971
972/*===========================================================================
973 * FUNCTION   : handleMetadataWithLock
974 *
975 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
976 *
977 * PARAMETERS : @metadata_buf: metadata buffer
978 *
979 * RETURN     :
980 *
981 *==========================================================================*/
982void QCamera3HardwareInterface::handleMetadataWithLock(
983    mm_camera_super_buf_t *metadata_buf)
984{
985    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
986    int32_t frame_number_valid = *(int32_t *)
987        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
988    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
989        CAM_INTF_META_PENDING_REQUESTS, metadata);
990    uint32_t frame_number = *(uint32_t *)
991        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
992    const struct timeval *tv = (const struct timeval *)
993        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
994    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
995        tv->tv_usec * NSEC_PER_USEC;
996    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
997        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
998
999    int32_t urgent_frame_number_valid = *(int32_t *)
1000        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
1001    uint32_t urgent_frame_number = *(uint32_t *)
1002        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
1003
1004    if (urgent_frame_number_valid) {
1005        ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
1006          __func__, urgent_frame_number, capture_time);
1007
1008        //Recieved an urgent Frame Number, handle it
1009        //using HAL3.1 quirk for partial results
1010        for (List<PendingRequestInfo>::iterator i =
1011            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
1012            camera3_notify_msg_t notify_msg;
1013            ALOGV("%s: Iterator Frame = %d urgent frame = %d",
1014                __func__, i->frame_number, urgent_frame_number);
1015
1016            if (i->frame_number < urgent_frame_number &&
1017                i->bNotified == 0) {
1018                notify_msg.type = CAMERA3_MSG_SHUTTER;
1019                notify_msg.message.shutter.frame_number = i->frame_number;
1020                notify_msg.message.shutter.timestamp = capture_time -
1021                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
1022                mCallbackOps->notify(mCallbackOps, &notify_msg);
1023                i->timestamp = notify_msg.message.shutter.timestamp;
1024                i->bNotified = 1;
1025                ALOGV("%s: Dummy notification !!!! notify frame_number = %d, capture_time = %lld",
1026                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
1027            }
1028
1029            if (i->frame_number == urgent_frame_number) {
1030
1031                camera3_capture_result_t result;
1032
1033                // Send shutter notify to frameworks
1034                notify_msg.type = CAMERA3_MSG_SHUTTER;
1035                notify_msg.message.shutter.frame_number = i->frame_number;
1036                notify_msg.message.shutter.timestamp = capture_time;
1037                mCallbackOps->notify(mCallbackOps, &notify_msg);
1038
1039                i->timestamp = capture_time;
1040                i->bNotified = 1;
1041
1042                // Extract 3A metadata
1043                result.result =
1044                    translateCbUrgentMetadataToResultMetadata(metadata);
1045                // Populate metadata result
1046                result.frame_number = urgent_frame_number;
1047                result.num_output_buffers = 0;
1048                result.output_buffers = NULL;
1049                mCallbackOps->process_capture_result(mCallbackOps, &result);
1050                ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
1051                     __func__, result.frame_number, capture_time);
1052                free_camera_metadata((camera_metadata_t *)result.result);
1053                break;
1054            }
1055        }
1056    }
1057
1058    if (!frame_number_valid) {
1059        ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
1060        mMetadataChannel->bufDone(metadata_buf);
1061        free(metadata_buf);
1062        goto done_metadata;
1063    }
1064    ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
1065            frame_number, capture_time);
1066
1067    // Go through the pending requests info and send shutter/results to frameworks
1068    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1069        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1070        camera3_capture_result_t result;
1071        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1072
1073        // Flush out all entries with less or equal frame numbers.
1074        mPendingRequest--;
1075
1076        // Check whether any stream buffer corresponding to this is dropped or not
1077        // If dropped, then notify ERROR_BUFFER for the corresponding stream and
1078        // buffer with CAMERA3_BUFFER_STATUS_ERROR
1079        if (cam_frame_drop.frame_dropped) {
1080            camera3_notify_msg_t notify_msg;
1081            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1082                    j != i->buffers.end(); j++) {
1083                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1084                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1085                for (uint32_t k=0; k<cam_frame_drop.cam_stream_ID.num_streams; k++) {
1086                  if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
1087                      // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
1088                      ALOGV("%s: Start of reporting error frame#=%d, streamID=%d",
1089                             __func__, i->frame_number, streamID);
1090                      notify_msg.type = CAMERA3_MSG_ERROR;
1091                      notify_msg.message.error.frame_number = i->frame_number;
1092                      notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
1093                      notify_msg.message.error.error_stream = j->stream;
1094                      mCallbackOps->notify(mCallbackOps, &notify_msg);
1095                      ALOGV("%s: End of reporting error frame#=%d, streamID=%d",
1096                             __func__, i->frame_number, streamID);
1097                      PendingFrameDropInfo PendingFrameDrop;
1098                      PendingFrameDrop.frame_number=i->frame_number;
1099                      PendingFrameDrop.stream_ID = streamID;
1100                      // Add the Frame drop info to mPendingFrameDropList
1101                      mPendingFrameDropList.push_back(PendingFrameDrop);
1102                  }
1103                }
1104            }
1105        }
1106
1107        // Send empty metadata with already filled buffers for dropped metadata
1108        // and send valid metadata with already filled buffers for current metadata
1109        if (i->frame_number < frame_number) {
1110            CameraMetadata dummyMetadata;
1111            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1112                    &i->timestamp, 1);
1113            dummyMetadata.update(ANDROID_REQUEST_ID,
1114                    &(i->request_id), 1);
1115            result.result = dummyMetadata.release();
1116        } else {
1117            result.result = translateCbMetadataToResultMetadata(metadata,
1118                    i->timestamp, i->request_id, i->blob_request,
1119                    &(i->input_jpeg_settings));
1120            if (mIsZslMode) {
1121                int found_metadata = 0;
1122                //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1123                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1124                    j != i->buffers.end(); j++) {
1125                    if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1126                        //check if corresp. zsl already exists in the stored metadata list
1127                        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1128                                m != mStoredMetadataList.begin(); m++) {
1129                            if (m->frame_number == frame_number) {
1130                                m->meta_buf = metadata_buf;
1131                                found_metadata = 1;
1132                                break;
1133                            }
1134                        }
1135                        if (!found_metadata) {
1136                            MetadataBufferInfo store_meta_info;
1137                            store_meta_info.meta_buf = metadata_buf;
1138                            store_meta_info.frame_number = frame_number;
1139                            mStoredMetadataList.push_back(store_meta_info);
1140                            found_metadata = 1;
1141                        }
1142                    }
1143                }
1144                if (!found_metadata) {
1145                    if (!i->input_buffer_present && i->blob_request) {
1146                        //livesnapshot or fallback non-zsl snapshot case
1147                        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1148                                j != i->buffers.end(); j++){
1149                            if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1150                                j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1151                                mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1152                                break;
1153                            }
1154                        }
1155                    } else {
1156                        //return the metadata immediately
1157                        mMetadataChannel->bufDone(metadata_buf);
1158                        free(metadata_buf);
1159                    }
1160                }
1161            } else if (!mIsZslMode && i->blob_request) {
1162                //If it is a blob request then send the metadata to the picture channel
1163                mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1164            } else {
1165                // Return metadata buffer
1166                mMetadataChannel->bufDone(metadata_buf);
1167                free(metadata_buf);
1168            }
1169        }
1170        if (!result.result) {
1171            ALOGE("%s: metadata is NULL", __func__);
1172        }
1173        result.frame_number = i->frame_number;
1174        result.num_output_buffers = 0;
1175        result.output_buffers = NULL;
1176        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1177                    j != i->buffers.end(); j++) {
1178            if (j->buffer) {
1179                result.num_output_buffers++;
1180            }
1181        }
1182
1183        if (result.num_output_buffers > 0) {
1184            camera3_stream_buffer_t *result_buffers =
1185                new camera3_stream_buffer_t[result.num_output_buffers];
1186            if (!result_buffers) {
1187                ALOGE("%s: Fatal error: out of memory", __func__);
1188            }
1189            size_t result_buffers_idx = 0;
1190            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1191                    j != i->buffers.end(); j++) {
1192                if (j->buffer) {
1193                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1194                            m != mPendingFrameDropList.end(); m++) {
1195                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
1196                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1197                        if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
1198                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1199                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
1200                                  __func__, frame_number, streamID);
1201                            m = mPendingFrameDropList.erase(m);
1202                            break;
1203                        }
1204                    }
1205
1206                    for (List<PendingBufferInfo>::iterator k =
1207                      mPendingBuffersMap.mPendingBufferList.begin();
1208                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
1209                      if (k->buffer == j->buffer->buffer) {
1210                        ALOGV("%s: Found buffer %p in pending buffer List "
1211                              "for frame %d, Take it out!!", __func__,
1212                               k->buffer, k->frame_number);
1213                        mPendingBuffersMap.num_buffers--;
1214                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
1215                        break;
1216                      }
1217                    }
1218
1219                    result_buffers[result_buffers_idx++] = *(j->buffer);
1220                    free(j->buffer);
1221                    j->buffer = NULL;
1222                }
1223            }
1224            result.output_buffers = result_buffers;
1225
1226            mCallbackOps->process_capture_result(mCallbackOps, &result);
1227            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1228                    __func__, result.frame_number, i->timestamp);
1229            free_camera_metadata((camera_metadata_t *)result.result);
1230            delete[] result_buffers;
1231        } else {
1232            mCallbackOps->process_capture_result(mCallbackOps, &result);
1233            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1234                        __func__, result.frame_number, i->timestamp);
1235            free_camera_metadata((camera_metadata_t *)result.result);
1236        }
1237        // erase the element from the list
1238        i = mPendingRequestsList.erase(i);
1239    }
1240
1241done_metadata:
1242    if (!pending_requests)
1243        unblockRequestIfNecessary();
1244
1245}
1246
1247/*===========================================================================
1248 * FUNCTION   : handleBufferWithLock
1249 *
1250 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1251 *
1252 * PARAMETERS : @buffer: image buffer for the callback
1253 *              @frame_number: frame number of the image buffer
1254 *
1255 * RETURN     :
1256 *
1257 *==========================================================================*/
1258void QCamera3HardwareInterface::handleBufferWithLock(
1259    camera3_stream_buffer_t *buffer, uint32_t frame_number)
1260{
1261    // If the frame number doesn't exist in the pending request list,
1262    // directly send the buffer to the frameworks, and update pending buffers map
1263    // Otherwise, book-keep the buffer.
1264    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1265    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1266        i++;
1267    }
1268    if (i == mPendingRequestsList.end()) {
1269        // Verify all pending requests frame_numbers are greater
1270        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1271                j != mPendingRequestsList.end(); j++) {
1272            if (j->frame_number < frame_number) {
1273                ALOGE("%s: Error: pending frame number %d is smaller than %d",
1274                        __func__, j->frame_number, frame_number);
1275            }
1276        }
1277        camera3_capture_result_t result;
1278        result.result = NULL;
1279        result.frame_number = frame_number;
1280        result.num_output_buffers = 1;
1281        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1282                m != mPendingFrameDropList.end(); m++) {
1283            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
1284            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1285            if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
1286                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1287                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
1288                        __func__, frame_number, streamID);
1289                m = mPendingFrameDropList.erase(m);
1290                break;
1291            }
1292        }
1293        result.output_buffers = buffer;
1294        ALOGV("%s: result frame_number = %d, buffer = %p",
1295                __func__, frame_number, buffer->buffer);
1296
1297        for (List<PendingBufferInfo>::iterator k =
1298                mPendingBuffersMap.mPendingBufferList.begin();
1299                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
1300            if (k->buffer == buffer->buffer) {
1301                ALOGV("%s: Found Frame buffer, take it out from list",
1302                        __func__);
1303
1304                mPendingBuffersMap.num_buffers--;
1305                k = mPendingBuffersMap.mPendingBufferList.erase(k);
1306                break;
1307            }
1308        }
1309        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1310            __func__, mPendingBuffersMap.num_buffers);
1311
1312        if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1313            int found = 0;
1314            for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1315                k != mStoredMetadataList.end(); k++) {
1316                if (k->frame_number == frame_number) {
1317                    k->zsl_buf_hdl = buffer->buffer;
1318                    found = 1;
1319                    break;
1320                }
1321            }
1322            if (!found) {
1323                MetadataBufferInfo meta_info;
1324                meta_info.frame_number = frame_number;
1325                meta_info.zsl_buf_hdl = buffer->buffer;
1326                mStoredMetadataList.push_back(meta_info);
1327            }
1328        }
1329        mCallbackOps->process_capture_result(mCallbackOps, &result);
1330    } else {
1331        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1332                j != i->buffers.end(); j++) {
1333            if (j->stream == buffer->stream) {
1334                if (j->buffer != NULL) {
1335                    ALOGE("%s: Error: buffer is already set", __func__);
1336                } else {
1337                    j->buffer = (camera3_stream_buffer_t *)malloc(
1338                            sizeof(camera3_stream_buffer_t));
1339                    *(j->buffer) = *buffer;
1340                    ALOGV("%s: cache buffer %p at result frame_number %d",
1341                            __func__, buffer, frame_number);
1342                }
1343            }
1344        }
1345    }
1346}
1347
1348/*===========================================================================
1349 * FUNCTION   : unblockRequestIfNecessary
1350 *
1351 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1352 *              that mMutex is held when this function is called.
1353 *
1354 * PARAMETERS :
1355 *
1356 * RETURN     :
1357 *
1358 *==========================================================================*/
1359void QCamera3HardwareInterface::unblockRequestIfNecessary()
1360{
1361    bool max_buffers_dequeued = false;
1362
1363    uint32_t queued_buffers = 0;
1364    for(List<stream_info_t*>::iterator it=mStreamInfo.begin();
1365        it != mStreamInfo.end(); it++) {
1366        queued_buffers = 0;
1367        for (List<PendingBufferInfo>::iterator k =
1368            mPendingBuffersMap.mPendingBufferList.begin();
1369            k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
1370            if (k->stream == (*it)->stream)
1371                queued_buffers++;
1372
1373            ALOGV("%s: Dequeued %d buffers for stream %p", __func__,
1374                queued_buffers, (*it)->stream);
1375            if (queued_buffers >=(* it)->stream->max_buffers) {
1376                ALOGV("%s: Wait!!! Max buffers Dequed", __func__);
1377                max_buffers_dequeued = true;
1378                break;
1379            }
1380        }
1381    }
1382
1383    if (!max_buffers_dequeued) {
1384        // Unblock process_capture_request
1385        pthread_cond_signal(&mRequestCond);
1386    }
1387}
1388
1389/*===========================================================================
1390 * FUNCTION   : registerStreamBuffers
1391 *
1392 * DESCRIPTION: Register buffers for a given stream with the HAL device.
1393 *
1394 * PARAMETERS :
1395 *   @stream_list : streams to be configured
1396 *
1397 * RETURN     :
1398 *
1399 *==========================================================================*/
1400int QCamera3HardwareInterface::registerStreamBuffers(
1401        const camera3_stream_buffer_set_t *buffer_set)
1402{
1403    int rc = 0;
1404
1405    pthread_mutex_lock(&mMutex);
1406
1407    if (buffer_set == NULL) {
1408        ALOGE("%s: Invalid buffer_set parameter.", __func__);
1409        pthread_mutex_unlock(&mMutex);
1410        return -EINVAL;
1411    }
1412    if (buffer_set->stream == NULL) {
1413        ALOGE("%s: Invalid stream parameter.", __func__);
1414        pthread_mutex_unlock(&mMutex);
1415        return -EINVAL;
1416    }
1417    if (buffer_set->num_buffers < 1) {
1418        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
1419        pthread_mutex_unlock(&mMutex);
1420        return -EINVAL;
1421    }
1422    if (buffer_set->buffers == NULL) {
1423        ALOGE("%s: Invalid buffers parameter.", __func__);
1424        pthread_mutex_unlock(&mMutex);
1425        return -EINVAL;
1426    }
1427
1428    camera3_stream_t *stream = buffer_set->stream;
1429    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
1430
1431    //set the buffer_set in the mStreamInfo array
1432    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1433            it != mStreamInfo.end(); it++) {
1434        if ((*it)->stream == stream) {
1435            uint32_t numBuffers = buffer_set->num_buffers;
1436            (*it)->buffer_set.stream = buffer_set->stream;
1437            (*it)->buffer_set.num_buffers = numBuffers;
1438            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
1439            if ((*it)->buffer_set.buffers == NULL) {
1440                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
1441                pthread_mutex_unlock(&mMutex);
1442                return -ENOMEM;
1443            }
1444            for (size_t j = 0; j < numBuffers; j++){
1445                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
1446            }
1447            (*it)->registered = 1;
1448        }
1449    }
1450    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
1451    if (rc < 0) {
1452        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
1453        pthread_mutex_unlock(&mMutex);
1454        return -ENODEV;
1455    }
1456
1457    pthread_mutex_unlock(&mMutex);
1458    return NO_ERROR;
1459}
1460
1461/*===========================================================================
1462 * FUNCTION   : processCaptureRequest
1463 *
1464 * DESCRIPTION: process a capture request from camera service
1465 *
1466 * PARAMETERS :
1467 *   @request : request from framework to process
1468 *
1469 * RETURN     :
1470 *
1471 *==========================================================================*/
1472int QCamera3HardwareInterface::processCaptureRequest(
1473                    camera3_capture_request_t *request)
1474{
1475    int rc = NO_ERROR;
1476    int32_t request_id;
1477    CameraMetadata meta;
1478    MetadataBufferInfo reproc_meta;
1479    int queueMetadata = 0;
1480
1481    pthread_mutex_lock(&mMutex);
1482
1483    rc = validateCaptureRequest(request);
1484    if (rc != NO_ERROR) {
1485        ALOGE("%s: incoming request is not valid", __func__);
1486        pthread_mutex_unlock(&mMutex);
1487        return rc;
1488    }
1489
1490    meta = request->settings;
1491
1492    // For first capture request, send capture intent, and
1493    // stream on all streams
1494    if (mFirstRequest) {
1495
1496        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1497            int32_t hal_version = CAM_HAL_V3;
1498            uint8_t captureIntent =
1499                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1500
1501            memset(mParameters, 0, sizeof(parm_buffer_t));
1502            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1503            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1504                sizeof(hal_version), &hal_version);
1505            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1506                sizeof(captureIntent), &captureIntent);
1507            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1508                mParameters);
1509        }
1510
1511        mMetadataChannel->start();
1512        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1513            it != mStreamInfo.end(); it++) {
1514            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1515            channel->start();
1516        }
1517    }
1518
1519    uint32_t frameNumber = request->frame_number;
1520    cam_stream_ID_t streamID;
1521
1522    if (meta.exists(ANDROID_REQUEST_ID)) {
1523        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1524        mCurrentRequestId = request_id;
1525        ALOGV("%s: Received request with id: %d",__func__, request_id);
1526    } else if (mFirstRequest || mCurrentRequestId == -1){
1527        ALOGE("%s: Unable to find request id field, \
1528                & no previous id available", __func__);
1529        return NAME_NOT_FOUND;
1530    } else {
1531        ALOGV("%s: Re-using old request id", __func__);
1532        request_id = mCurrentRequestId;
1533    }
1534
1535    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1536                                    __func__, __LINE__,
1537                                    request->num_output_buffers,
1538                                    request->input_buffer,
1539                                    frameNumber);
1540    // Acquire all request buffers first
1541    int blob_request = 0;
1542    for (size_t i = 0; i < request->num_output_buffers; i++) {
1543        const camera3_stream_buffer_t& output = request->output_buffers[i];
1544        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1545        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1546
1547        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1548            //Call function to store local copy of jpeg data for encode params.
1549            blob_request = 1;
1550            rc = getJpegSettings(request->settings);
1551            if (rc < 0) {
1552                ALOGE("%s: failed to get jpeg parameters", __func__);
1553                pthread_mutex_unlock(&mMutex);
1554                return rc;
1555            }
1556        }
1557
1558        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1559        if (rc != OK) {
1560            ALOGE("%s: fence wait failed %d", __func__, rc);
1561            pthread_mutex_unlock(&mMutex);
1562            return rc;
1563        }
1564        streamID.streamID[i]=channel->getStreamID(channel->getStreamTypeMask());
1565    }
1566    streamID.num_streams=request->num_output_buffers;
1567
1568    rc = setFrameParameters(request, streamID);
1569    if (rc < 0) {
1570        ALOGE("%s: fail to set frame parameters", __func__);
1571        pthread_mutex_unlock(&mMutex);
1572        return rc;
1573    }
1574
1575    /* Update pending request list and pending buffers map */
1576    PendingRequestInfo pendingRequest;
1577    pendingRequest.frame_number = frameNumber;
1578    pendingRequest.num_buffers = request->num_output_buffers;
1579    pendingRequest.request_id = request_id;
1580    pendingRequest.blob_request = blob_request;
1581    pendingRequest.bNotified = 0;
1582    if (blob_request)
1583        pendingRequest.input_jpeg_settings = *mJpegSettings;
1584    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1585
1586    for (size_t i = 0; i < request->num_output_buffers; i++) {
1587        RequestedBufferInfo requestedBuf;
1588        requestedBuf.stream = request->output_buffers[i].stream;
1589        requestedBuf.buffer = NULL;
1590        pendingRequest.buffers.push_back(requestedBuf);
1591
1592        // Add to buffer handle the pending buffers list
1593        PendingBufferInfo bufferInfo;
1594        bufferInfo.frame_number = frameNumber;
1595        bufferInfo.buffer = request->output_buffers[i].buffer;
1596        bufferInfo.stream = request->output_buffers[i].stream;
1597        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
1598        mPendingBuffersMap.num_buffers++;
1599        ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1600          __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
1601          bufferInfo.stream->format);
1602    }
1603    ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1604          __func__, mPendingBuffersMap.num_buffers);
1605    mPendingRequestsList.push_back(pendingRequest);
1606
1607    // Notify metadata channel we receive a request
1608    mMetadataChannel->request(NULL, frameNumber);
1609
1610    // Call request on other streams
1611    for (size_t i = 0; i < request->num_output_buffers; i++) {
1612        const camera3_stream_buffer_t& output = request->output_buffers[i];
1613        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1614        mm_camera_buf_def_t *pInputBuffer = NULL;
1615
1616        if (channel == NULL) {
1617            ALOGE("%s: invalid channel pointer for stream", __func__);
1618            continue;
1619        }
1620
1621        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1622            QCamera3RegularChannel* inputChannel = NULL;
1623            if(request->input_buffer != NULL){
1624                //Try to get the internal format
1625                inputChannel = (QCamera3RegularChannel*)
1626                    request->input_buffer->stream->priv;
1627                if(inputChannel == NULL ){
1628                    ALOGE("%s: failed to get input channel handle", __func__);
1629                } else {
1630                    pInputBuffer =
1631                        inputChannel->getInternalFormatBuffer(
1632                                request->input_buffer->buffer);
1633                    ALOGD("%s: Input buffer dump",__func__);
1634                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1635                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1636                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1637                    ALOGD("Handle:%p", request->input_buffer->buffer);
1638                    //TODO: need to get corresponding metadata and send it to pproc
1639                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1640                         m != mStoredMetadataList.end(); m++) {
1641                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1642                            reproc_meta.meta_buf = m->meta_buf;
1643                            queueMetadata = 1;
1644                            break;
1645                        }
1646                    }
1647                }
1648            }
1649            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1650                            pInputBuffer,(QCamera3Channel*)inputChannel);
1651            if (queueMetadata) {
1652                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1653            }
1654        } else {
1655            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1656                __LINE__, output.buffer, frameNumber);
1657            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1658                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1659                     m != mStoredMetadataList.end(); m++) {
1660                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1661                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1662                            mMetadataChannel->bufDone(m->meta_buf);
1663                            free(m->meta_buf);
1664                            m = mStoredMetadataList.erase(m);
1665                            break;
1666                        }
1667                   }
1668                }
1669            }
1670            rc = channel->request(output.buffer, frameNumber);
1671        }
1672        if (rc < 0)
1673            ALOGE("%s: request failed", __func__);
1674    }
1675
1676    mFirstRequest = false;
1677    // Added a timed condition wait
1678    struct timespec ts;
1679    uint8_t isValidTimeout = 1;
1680    rc = clock_gettime(CLOCK_REALTIME, &ts);
1681    if (rc < 0) {
1682        isValidTimeout = 0;
1683        ALOGE("%s: Error reading the real time clock!!", __func__);
1684    }
1685    else {
1686        // Make timeout as 5 sec for request to be honored
1687        ts.tv_sec += 5;
1688    }
1689    //Block on conditional variable
1690    mPendingRequest++;
1691    do {
1692        if (!isValidTimeout) {
1693            ALOGV("%s: Blocking on conditional wait", __func__);
1694            pthread_cond_wait(&mRequestCond, &mMutex);
1695        }
1696        else {
1697            ALOGV("%s: Blocking on timed conditional wait", __func__);
1698            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1699            if (rc == ETIMEDOUT) {
1700                rc = -ENODEV;
1701                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1702                break;
1703            }
1704        }
1705        ALOGV("%s: Unblocked", __func__);
1706    }while (mPendingRequest >= kMaxInFlight);
1707
1708    pthread_mutex_unlock(&mMutex);
1709
1710    return rc;
1711}
1712
1713/*===========================================================================
1714 * FUNCTION   : getMetadataVendorTagOps
1715 *
1716 * DESCRIPTION:
1717 *
1718 * PARAMETERS :
1719 *
1720 *
1721 * RETURN     :
1722 *==========================================================================*/
1723void QCamera3HardwareInterface::getMetadataVendorTagOps(
1724                    vendor_tag_query_ops_t* /*ops*/)
1725{
1726    /* Enable locks when we eventually add Vendor Tags */
1727    /*
1728    pthread_mutex_lock(&mMutex);
1729
1730    pthread_mutex_unlock(&mMutex);
1731    */
1732    return;
1733}
1734
1735/*===========================================================================
1736 * FUNCTION   : dump
1737 *
1738 * DESCRIPTION:
1739 *
1740 * PARAMETERS :
1741 *
1742 *
1743 * RETURN     :
1744 *==========================================================================*/
1745void QCamera3HardwareInterface::dump(int /*fd*/)
1746{
1747    /*Enable lock when we implement this function*/
1748    /*
1749    pthread_mutex_lock(&mMutex);
1750
1751    pthread_mutex_unlock(&mMutex);
1752    */
1753    return;
1754}
1755
1756/*===========================================================================
1757 * FUNCTION   : flush
1758 *
1759 * DESCRIPTION:
1760 *
1761 * PARAMETERS :
1762 *
1763 *
1764 * RETURN     :
1765 *==========================================================================*/
1766int QCamera3HardwareInterface::flush()
1767{
1768
1769    unsigned int frameNum = 0;
1770    camera3_notify_msg_t notify_msg;
1771    camera3_capture_result_t result;
1772    camera3_stream_buffer_t pStream_Buf;
1773
1774    ALOGV("%s: Unblocking Process Capture Request", __func__);
1775
1776    // Stop the Streams/Channels
1777    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1778        it != mStreamInfo.end(); it++) {
1779        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1780        channel->stop();
1781        (*it)->status = INVALID;
1782    }
1783
1784    if (mMetadataChannel) {
1785        /* If content of mStreamInfo is not 0, there is metadata stream */
1786        mMetadataChannel->stop();
1787    }
1788
1789    // Mutex Lock
1790    pthread_mutex_lock(&mMutex);
1791
1792    // Unblock process_capture_request
1793    mPendingRequest = 0;
1794    pthread_cond_signal(&mRequestCond);
1795
1796    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1797    frameNum = i->frame_number;
1798    ALOGV("%s: Latest frame num on  mPendingRequestsList = %d",
1799      __func__, frameNum);
1800
1801    // Go through the pending buffers and send buffer errors
1802    for (List<PendingBufferInfo>::iterator k =
1803         mPendingBuffersMap.mPendingBufferList.begin();
1804         k != mPendingBuffersMap.mPendingBufferList.end();  ) {
1805         ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1806          __func__, k->frame_number, k->buffer, k->stream,
1807          k->stream->format);
1808
1809        if (k->frame_number < frameNum) {
1810            // Send Error notify to frameworks for each buffer for which
1811            // metadata buffer is already sent
1812            ALOGV("%s: Sending ERROR BUFFER for frame %d, buffer %p",
1813              __func__, k->frame_number, k->buffer);
1814
1815            notify_msg.type = CAMERA3_MSG_ERROR;
1816            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
1817            notify_msg.message.error.error_stream = k->stream;
1818            notify_msg.message.error.frame_number = k->frame_number;
1819            mCallbackOps->notify(mCallbackOps, &notify_msg);
1820            ALOGV("%s: notify frame_number = %d", __func__,
1821                    i->frame_number);
1822
1823            pStream_Buf.acquire_fence = -1;
1824            pStream_Buf.release_fence = -1;
1825            pStream_Buf.buffer = k->buffer;
1826            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1827            pStream_Buf.stream = k->stream;
1828
1829            result.result = NULL;
1830            result.frame_number = k->frame_number;
1831            result.num_output_buffers = 1;
1832            result.output_buffers = &pStream_Buf ;
1833            mCallbackOps->process_capture_result(mCallbackOps, &result);
1834
1835            mPendingBuffersMap.num_buffers--;
1836            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1837        }
1838        else {
1839          k++;
1840        }
1841    }
1842
1843    ALOGV("%s:Sending ERROR REQUEST for all pending requests", __func__);
1844
1845    // Go through the pending requests info and send error request to framework
1846    for (i = mPendingRequestsList.begin(); i != mPendingRequestsList.end(); ) {
1847        int numBuffers = 0;
1848        ALOGV("%s:Sending ERROR REQUEST for frame %d",
1849              __func__, i->frame_number);
1850
1851        // Send shutter notify to frameworks
1852        notify_msg.type = CAMERA3_MSG_ERROR;
1853        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
1854        notify_msg.message.error.error_stream = NULL;
1855        notify_msg.message.error.frame_number = i->frame_number;
1856        mCallbackOps->notify(mCallbackOps, &notify_msg);
1857
1858        result.frame_number = i->frame_number;
1859        result.num_output_buffers = 0;
1860        result.output_buffers = NULL;
1861        numBuffers = 0;
1862
1863        for (List<PendingBufferInfo>::iterator k =
1864             mPendingBuffersMap.mPendingBufferList.begin();
1865             k != mPendingBuffersMap.mPendingBufferList.end(); ) {
1866          if (k->frame_number == i->frame_number) {
1867            ALOGV("%s: Sending Error for frame = %d, buffer = %p,"
1868                   " stream = %p, stream format = %d",__func__,
1869                   k->frame_number, k->buffer, k->stream, k->stream->format);
1870
1871            pStream_Buf.acquire_fence = -1;
1872            pStream_Buf.release_fence = -1;
1873            pStream_Buf.buffer = k->buffer;
1874            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1875            pStream_Buf.stream = k->stream;
1876
1877            result.num_output_buffers = 1;
1878            result.output_buffers = &pStream_Buf;
1879            result.result = NULL;
1880            result.frame_number = i->frame_number;
1881
1882            mCallbackOps->process_capture_result(mCallbackOps, &result);
1883            mPendingBuffersMap.num_buffers--;
1884            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1885            numBuffers++;
1886          }
1887          else {
1888            k++;
1889          }
1890        }
1891        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1892              __func__, mPendingBuffersMap.num_buffers);
1893
1894        i = mPendingRequestsList.erase(i);
1895    }
1896
1897    /* Reset pending buffer list and requests list */
1898    mPendingRequestsList.clear();
1899    /* Reset pending frame Drop list and requests list */
1900    mPendingFrameDropList.clear();
1901
1902    mPendingBuffersMap.num_buffers = 0;
1903    mPendingBuffersMap.mPendingBufferList.clear();
1904    ALOGV("%s: Cleared all the pending buffers ", __func__);
1905
1906    /*flush the metadata list*/
1907    if (!mStoredMetadataList.empty()) {
1908        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1909              m != mStoredMetadataList.end(); ) {
1910            mMetadataChannel->bufDone(m->meta_buf);
1911            free(m->meta_buf);
1912            m = mStoredMetadataList.erase(m);
1913        }
1914    }
1915    ALOGV("%s: Flushing the metadata list done!! ", __func__);
1916
1917    mFirstRequest = true;
1918    pthread_mutex_unlock(&mMutex);
1919    return 0;
1920}
1921
1922/*===========================================================================
1923 * FUNCTION   : captureResultCb
1924 *
1925 * DESCRIPTION: Callback handler for all capture result
1926 *              (streams, as well as metadata)
1927 *
1928 * PARAMETERS :
1929 *   @metadata : metadata information
1930 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1931 *               NULL if metadata.
1932 *
1933 * RETURN     : NONE
1934 *==========================================================================*/
1935void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1936                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1937{
1938    pthread_mutex_lock(&mMutex);
1939
1940    if (metadata_buf)
1941        handleMetadataWithLock(metadata_buf);
1942    else
1943        handleBufferWithLock(buffer, frame_number);
1944
1945    pthread_mutex_unlock(&mMutex);
1946    return;
1947}
1948
1949/*===========================================================================
1950 * FUNCTION   : translateCbMetadataToResultMetadata
1951 *
1952 * DESCRIPTION:
1953 *
1954 * PARAMETERS :
1955 *   @metadata : metadata information from callback
1956 *
1957 * RETURN     : camera_metadata_t*
1958 *              metadata in a format specified by fwk
1959 *==========================================================================*/
1960camera_metadata_t*
1961QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1962                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1963                                 int32_t request_id, int32_t BlobRequest,
1964                                 jpeg_settings_t* inputjpegsettings)
1965{
1966    CameraMetadata camMetadata;
1967    camera_metadata_t* resultMetadata;
1968
1969    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1970    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1971
1972    // Update the JPEG related info
1973    if (BlobRequest) {
1974        camMetadata.update(ANDROID_JPEG_ORIENTATION, &(inputjpegsettings->jpeg_orientation), 1);
1975        camMetadata.update(ANDROID_JPEG_QUALITY, &(inputjpegsettings->jpeg_quality), 1);
1976
1977        int32_t thumbnailSizeTable[2];
1978        thumbnailSizeTable[0] = inputjpegsettings->thumbnail_size.width;
1979        thumbnailSizeTable[1] = inputjpegsettings->thumbnail_size.height;
1980        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSizeTable, 2);
1981        ALOGV("%s: Orien=%d, quality=%d wid=%d, height=%d", __func__, inputjpegsettings->jpeg_orientation,
1982               inputjpegsettings->jpeg_quality,thumbnailSizeTable[0], thumbnailSizeTable[1]);
1983
1984        if (inputjpegsettings->gps_coordinates[0]) {
1985            double gpsCoordinates[3];
1986            gpsCoordinates[0]=*(inputjpegsettings->gps_coordinates[0]);
1987            gpsCoordinates[1]=*(inputjpegsettings->gps_coordinates[1]);
1988            gpsCoordinates[2]=*(inputjpegsettings->gps_coordinates[2]);
1989            camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
1990            ALOGV("%s: gpsCoordinates[0]=%f, 1=%f 2=%f", __func__, gpsCoordinates[0],
1991                 gpsCoordinates[1],gpsCoordinates[2]);
1992        }
1993
1994        if (inputjpegsettings->gps_timestamp) {
1995            camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, inputjpegsettings->gps_timestamp, 1);
1996            ALOGV("%s: gps_timestamp=%lld", __func__, *(inputjpegsettings->gps_timestamp));
1997        }
1998
1999        String8 str(inputjpegsettings->gps_processing_method);
2000        if (strlen(mJpegSettings->gps_processing_method) > 0) {
2001            camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
2002        }
2003    }
2004    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
2005    uint8_t next_entry;
2006    while (curr_entry != CAM_INTF_PARM_MAX) {
2007       switch (curr_entry) {
2008         case CAM_INTF_META_FACE_DETECTION:{
2009             cam_face_detection_data_t *faceDetectionInfo =
2010                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
2011             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
2012             int32_t faceIds[MAX_ROI];
2013             uint8_t faceScores[MAX_ROI];
2014             int32_t faceRectangles[MAX_ROI * 4];
2015             int32_t faceLandmarks[MAX_ROI * 6];
2016             int j = 0, k = 0;
2017             for (int i = 0; i < numFaces; i++) {
2018                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
2019                 faceScores[i] = faceDetectionInfo->faces[i].score;
2020                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
2021                         faceRectangles+j, -1);
2022                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
2023                 j+= 4;
2024                 k+= 6;
2025             }
2026
2027             if (numFaces <= 0) {
2028                memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
2029                memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
2030                memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
2031                memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
2032             }
2033
2034             camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
2035             camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
2036             camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
2037               faceRectangles, numFaces*4);
2038             camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
2039               faceLandmarks, numFaces*6);
2040
2041            break;
2042            }
2043         case CAM_INTF_META_COLOR_CORRECT_MODE:{
2044             uint8_t  *color_correct_mode =
2045                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
2046             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
2047             break;
2048          }
2049
2050         // 3A state is sent in urgent partial result (uses quirk)
2051         case CAM_INTF_META_AEC_PRECAPTURE_ID:
2052         case CAM_INTF_META_AEC_ROI:
2053         case CAM_INTF_META_AEC_STATE:
2054         case CAM_INTF_PARM_FOCUS_MODE:
2055         case CAM_INTF_META_AF_ROI:
2056         case CAM_INTF_META_AF_STATE:
2057         case CAM_INTF_META_AF_TRIGGER_ID:
2058         case CAM_INTF_PARM_WHITE_BALANCE:
2059         case CAM_INTF_META_AWB_REGIONS:
2060         case CAM_INTF_META_AWB_STATE:
2061         case CAM_INTF_META_MODE: {
2062           ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
2063           break;
2064         }
2065
2066          case CAM_INTF_META_EDGE_MODE: {
2067             cam_edge_application_t  *edgeApplication =
2068                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
2069             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
2070             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
2071             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
2072             break;
2073          }
2074          case CAM_INTF_META_FLASH_POWER: {
2075             uint8_t  *flashPower =
2076                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
2077             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
2078             break;
2079          }
2080          case CAM_INTF_META_FLASH_FIRING_TIME: {
2081             int64_t  *flashFiringTime =
2082                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
2083             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
2084             break;
2085          }
2086          case CAM_INTF_META_FLASH_STATE: {
2087             uint8_t  *flashState =
2088                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
2089             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
2090             break;
2091          }
2092          case CAM_INTF_META_FLASH_MODE:{
2093             uint8_t *flashMode = (uint8_t*)
2094                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
2095             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
2096             break;
2097          }
2098          case CAM_INTF_META_HOTPIXEL_MODE: {
2099              uint8_t  *hotPixelMode =
2100                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
2101              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
2102              break;
2103          }
2104          case CAM_INTF_META_LENS_APERTURE:{
2105             float  *lensAperture =
2106                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
2107             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
2108             break;
2109          }
2110          case CAM_INTF_META_LENS_FILTERDENSITY: {
2111             float  *filterDensity =
2112                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
2113             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
2114             break;
2115          }
2116          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
2117             float  *focalLength =
2118                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2119             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
2120             break;
2121          }
2122          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
2123             float  *focusDistance =
2124                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
2125             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
2126             break;
2127          }
2128          case CAM_INTF_META_LENS_FOCUS_RANGE: {
2129             float  *focusRange =
2130                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
2131             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
2132             break;
2133          }
2134          case CAM_INTF_META_LENS_STATE: {
2135             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
2136             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
2137             break;
2138          }
2139          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
2140             uint8_t  *opticalStab =
2141                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
2142             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
2143             break;
2144          }
2145          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
2146             uint8_t  *noiseRedMode =
2147                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
2148             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
2149             break;
2150          }
2151          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
2152             uint8_t  *noiseRedStrength =
2153                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
2154             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
2155             break;
2156          }
2157          case CAM_INTF_META_SCALER_CROP_REGION: {
2158             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
2159             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
2160             int32_t scalerCropRegion[4];
2161             scalerCropRegion[0] = hScalerCropRegion->left;
2162             scalerCropRegion[1] = hScalerCropRegion->top;
2163             scalerCropRegion[2] = hScalerCropRegion->width;
2164             scalerCropRegion[3] = hScalerCropRegion->height;
2165             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
2166             break;
2167          }
2168          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
2169             int64_t  *sensorExpTime =
2170                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2171             mMetadataResponse.exposure_time = *sensorExpTime;
2172             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
2173             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
2174             break;
2175          }
2176          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
2177             int64_t  *sensorFameDuration =
2178                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
2179             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
2180             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
2181             break;
2182          }
2183          case CAM_INTF_META_SENSOR_SENSITIVITY:{
2184             int32_t  *sensorSensitivity =
2185                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
2186             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
2187             mMetadataResponse.iso_speed = *sensorSensitivity;
2188             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
2189             break;
2190          }
2191          case CAM_INTF_META_SHADING_MODE: {
2192             uint8_t  *shadingMode =
2193                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
2194             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
2195             break;
2196          }
2197          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
2198             uint8_t  *faceDetectMode =
2199                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
2200             uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
2201                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
2202                                                        *faceDetectMode);
2203             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
2204             break;
2205          }
2206          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
2207             uint8_t  *histogramMode =
2208                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
2209             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
2210             break;
2211          }
2212          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
2213               uint8_t  *sharpnessMapMode =
2214                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
2215               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
2216                                  sharpnessMapMode, 1);
2217               break;
2218           }
2219          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
2220               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
2221               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
2222               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
2223                                  (int32_t*)sharpnessMap->sharpness,
2224                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
2225               break;
2226          }
2227          case CAM_INTF_META_LENS_SHADING_MAP: {
2228               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2229               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2230               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2231               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
2232               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2233                                  (float*)lensShadingMap->lens_shading,
2234                                  4*map_width*map_height);
2235               break;
2236          }
2237
2238          case CAM_INTF_META_TONEMAP_MODE: {
2239             uint8_t  *toneMapMode =
2240                (uint8_t *)POINTER_OF(CAM_INTF_META_TONEMAP_MODE, metadata);
2241             camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
2242             break;
2243          }
2244
2245          case CAM_INTF_META_TONEMAP_CURVES:{
2246             //Populate CAM_INTF_META_TONEMAP_CURVES
2247             /* ch0 = G, ch 1 = B, ch 2 = R*/
2248             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2249             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
2250             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
2251                                (float*)tonemap->curves[0].tonemap_points,
2252                                tonemap->tonemap_points_cnt * 2);
2253
2254             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
2255                                (float*)tonemap->curves[1].tonemap_points,
2256                                tonemap->tonemap_points_cnt * 2);
2257
2258             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
2259                                (float*)tonemap->curves[2].tonemap_points,
2260                                tonemap->tonemap_points_cnt * 2);
2261             break;
2262          }
2263          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
2264             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
2265             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
2266             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
2267             break;
2268          }
2269          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
2270              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
2271              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
2272              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
2273                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
2274              break;
2275          }
2276          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
2277             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
2278             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
2279             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
2280                       predColorCorrectionGains->gains, 4);
2281             break;
2282          }
2283          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
2284             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
2285                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2286             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2287                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2288             break;
2289
2290          }
2291          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
2292             uint8_t *blackLevelLock = (uint8_t*)
2293               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2294             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2295             break;
2296          }
2297          case CAM_INTF_META_SCENE_FLICKER:{
2298             uint8_t *sceneFlicker = (uint8_t*)
2299             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2300             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2301             break;
2302          }
2303          case CAM_INTF_PARM_LED_MODE:
2304             break;
2305          case CAM_INTF_PARM_EFFECT: {
2306             uint8_t *effectMode = (uint8_t*)
2307                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2308             uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
2309                                                    sizeof(EFFECT_MODES_MAP),
2310                                                    *effectMode);
2311             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2312             break;
2313          }
2314          case CAM_INTF_META_TEST_PATTERN_DATA: {
2315             cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
2316                 POINTER_OF(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
2317             int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
2318                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
2319                     testPatternData->mode);
2320             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
2321                     &fwk_testPatternMode, 1);
2322             break;
2323          }
2324          default:
2325             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2326                   __func__, curr_entry);
2327             break;
2328       }
2329       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2330       curr_entry = next_entry;
2331    }
2332
2333    int32_t hotPixelMap[2];
2334    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
2335
2336    resultMetadata = camMetadata.release();
2337    return resultMetadata;
2338}
2339
2340/*===========================================================================
2341 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
2342 *
2343 * DESCRIPTION: Translate urgent 3A (AE/AF/AWB) entries from the HAL
2344 *              metadata buffer into a partial result for the framework.
2345 * PARAMETERS :
2346 *   @metadata : metadata information from callback
2347 *
2348 * RETURN     : camera_metadata_t*
2349 *              metadata in a format specified by fwk
2350 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata) {

    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Tag this buffer as a partial result so the framework (via the
    // ANDROID_QUIRKS_USE_PARTIAL_RESULT quirk) knows a final result follows.
    uint8_t partial_result_tag = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
    camMetadata.update(ANDROID_QUIRKS_PARTIAL_RESULT, &partial_result_tag, 1);

    // Walk the HAL metadata entry list; only the urgent 3A entries below are
    // translated here — everything else is handled by the non-urgent path.
    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
    uint8_t next_entry;
    while (curr_entry != CAM_INTF_PARM_MAX) {
      switch (curr_entry) {
        case CAM_INTF_META_AEC_PRECAPTURE_ID: {
            int32_t  *ae_precapture_id =
              (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                                          ae_precapture_id, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID", __func__);
          break;
        }
        case CAM_INTF_META_AEC_ROI: {
            // AE region: cam_area_t (rect + weight) -> [xmin,ymin,xmax,ymax,weight]
            cam_area_t  *hAeRegions =
                (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
            int32_t aeRegions[5];
            convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
            camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_REGIONS", __func__);
            break;
        }
        case CAM_INTF_META_AEC_STATE:{
            uint8_t *ae_state =
                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
            break;
        }
        case CAM_INTF_PARM_FOCUS_MODE:{
            // HAL focus mode -> framework AF mode via the lookup table.
            uint8_t  *focusMode =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
            uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
            break;
        }
        case CAM_INTF_META_AF_ROI:{
            /*af regions*/
            cam_area_t  *hAfRegions =
                (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
            int32_t afRegions[5];
            convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
            camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_REGIONS", __func__);
            break;
        }
        case CAM_INTF_META_AF_STATE: {
            uint8_t  *afState =
               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
            break;
        }
        case CAM_INTF_META_AF_TRIGGER_ID: {
            int32_t  *afTriggerId =
                 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID", __func__);
            break;
        }
        case CAM_INTF_PARM_WHITE_BALANCE: {
           // HAL white balance -> framework AWB mode via the lookup table.
           uint8_t  *whiteBalance =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
             uint8_t fwkWhiteBalanceMode =
                    (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
                    sizeof(WHITE_BALANCE_MODES_MAP)/
                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
                 &fwkWhiteBalanceMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
             break;
        }
        case CAM_INTF_META_AWB_REGIONS: {
           /*awb regions*/
           cam_area_t  *hAwbRegions =
               (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
           int32_t awbRegions[5];
           convertToRegions(hAwbRegions->rect, awbRegions,hAwbRegions->weight);
           camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_REGIONS", __func__);
           break;
        }
        case CAM_INTF_META_AWB_STATE: {
           uint8_t  *whiteBalanceState =
              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
           break;
        }
        case CAM_INTF_META_MODE: {
            uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
            camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_MODE", __func__);
            break;
        }
        default:
            // Non-urgent entry: reported by the regular translation path.
            ALOGV("%s: Normal Metadata %d, do not process",
              __func__, curr_entry);
       }
       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
       curr_entry = next_entry;
    }
    // Ownership of the packed buffer passes to the caller via release().
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
2467
2468/*===========================================================================
2469 * FUNCTION   : convertToRegions
2470 *
2471 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2472 *
2473 * PARAMETERS :
2474 *   @rect   : cam_rect_t struct to convert
2475 *   @region : int32_t destination array
2476 *   @weight : if we are converting from cam_area_t, weight is valid
2477 *             else weight = -1
2478 *
2479 *==========================================================================*/
2480void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2481    region[0] = rect.left;
2482    region[1] = rect.top;
2483    region[2] = rect.left + rect.width;
2484    region[3] = rect.top + rect.height;
2485    if (weight > -1) {
2486        region[4] = weight;
2487    }
2488}
2489
2490/*===========================================================================
2491 * FUNCTION   : convertFromRegions
2492 *
2493 * DESCRIPTION: helper method to convert a framework region metadata entry
2494 *              into a cam_area_t
2495 *
2496 * PARAMETERS :
2497 *   @roi      : cam_area_t destination to fill
2498 *   @settings : framework settings metadata to read the region from
2499 *   @tag      : metadata tag of the region entry, laid out as
2500 *               [xmin, ymin, xmax, ymax, weight]
2500 *
2501 *==========================================================================*/
2502void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2503                                                   const camera_metadata_t *settings,
2504                                                   uint32_t tag){
2505    CameraMetadata frame_settings;
2506    frame_settings = settings;
2507    int32_t x_min = frame_settings.find(tag).data.i32[0];
2508    int32_t y_min = frame_settings.find(tag).data.i32[1];
2509    int32_t x_max = frame_settings.find(tag).data.i32[2];
2510    int32_t y_max = frame_settings.find(tag).data.i32[3];
2511    roi->weight = frame_settings.find(tag).data.i32[4];
2512    roi->rect.left = x_min;
2513    roi->rect.top = y_min;
2514    roi->rect.width = x_max - x_min;
2515    roi->rect.height = y_max - y_min;
2516}
2517
2518/*===========================================================================
2519 * FUNCTION   : resetIfNeededROI
2520 *
2521 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2522 *              crop region
2523 *
2524 * PARAMETERS :
2525 *   @roi       : cam_area_t struct to resize
2526 *   @scalerCropRegion : cam_crop_region_t region to compare against
2527 *
2528 *
2529 *==========================================================================*/
2530bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2531                                                 const cam_crop_region_t* scalerCropRegion)
2532{
2533    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2534    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2535    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2536    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2537    if ((roi_x_max < scalerCropRegion->left) ||
2538        (roi_y_max < scalerCropRegion->top)  ||
2539        (roi->rect.left > crop_x_max) ||
2540        (roi->rect.top > crop_y_max)){
2541        return false;
2542    }
2543    if (roi->rect.left < scalerCropRegion->left) {
2544        roi->rect.left = scalerCropRegion->left;
2545    }
2546    if (roi->rect.top < scalerCropRegion->top) {
2547        roi->rect.top = scalerCropRegion->top;
2548    }
2549    if (roi_x_max > crop_x_max) {
2550        roi_x_max = crop_x_max;
2551    }
2552    if (roi_y_max > crop_y_max) {
2553        roi_y_max = crop_y_max;
2554    }
2555    roi->rect.width = roi_x_max - roi->rect.left;
2556    roi->rect.height = roi_y_max - roi->rect.top;
2557    return true;
2558}
2559
2560/*===========================================================================
2561 * FUNCTION   : convertLandmarks
2562 *
2563 * DESCRIPTION: helper method to extract the landmarks from face detection info
2564 *
2565 * PARAMETERS :
2566 *   @face   : cam_rect_t struct to convert
2567 *   @landmarks : int32_t destination array
2568 *
2569 *
2570 *==========================================================================*/
2571void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2572{
2573    landmarks[0] = face.left_eye_center.x;
2574    landmarks[1] = face.left_eye_center.y;
2575    landmarks[2] = face.right_eye_center.x;
2576    landmarks[3] = face.right_eye_center.y;
2577    landmarks[4] = face.mouth_center.x;
2578    landmarks[5] = face.mouth_center.y;
2579}
2580
2581#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2582/*===========================================================================
2583 * FUNCTION   : initCapabilities
2584 *
2585 * DESCRIPTION: initialize camera capabilities in static data struct
2586 *
2587 * PARAMETERS :
2588 *   @cameraId  : camera Id
2589 *
2590 * RETURN     : int32_t type of status
2591 *              NO_ERROR  -- success
2592 *              none-zero failure code
2593 *==========================================================================*/
2594int QCamera3HardwareInterface::initCapabilities(int cameraId)
2595{
2596    int rc = 0;
2597    mm_camera_vtbl_t *cameraHandle = NULL;
2598    QCamera3HeapMemory *capabilityHeap = NULL;
2599
2600    cameraHandle = camera_open(cameraId);
2601    if (!cameraHandle) {
2602        ALOGE("%s: camera_open failed", __func__);
2603        rc = -1;
2604        goto open_failed;
2605    }
2606
2607    capabilityHeap = new QCamera3HeapMemory();
2608    if (capabilityHeap == NULL) {
2609        ALOGE("%s: creation of capabilityHeap failed", __func__);
2610        goto heap_creation_failed;
2611    }
2612    /* Allocate memory for capability buffer */
2613    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2614    if(rc != OK) {
2615        ALOGE("%s: No memory for cappability", __func__);
2616        goto allocate_failed;
2617    }
2618
2619    /* Map memory for capability buffer */
2620    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2621    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2622                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2623                                capabilityHeap->getFd(0),
2624                                sizeof(cam_capability_t));
2625    if(rc < 0) {
2626        ALOGE("%s: failed to map capability buffer", __func__);
2627        goto map_failed;
2628    }
2629
2630    /* Query Capability */
2631    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2632    if(rc < 0) {
2633        ALOGE("%s: failed to query capability",__func__);
2634        goto query_failed;
2635    }
2636    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2637    if (!gCamCapability[cameraId]) {
2638        ALOGE("%s: out of memory", __func__);
2639        goto query_failed;
2640    }
2641    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2642                                        sizeof(cam_capability_t));
2643    rc = 0;
2644
2645query_failed:
2646    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2647                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2648map_failed:
2649    capabilityHeap->deallocate();
2650allocate_failed:
2651    delete capabilityHeap;
2652heap_creation_failed:
2653    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2654    cameraHandle = NULL;
2655open_failed:
2656    return rc;
2657}
2658
2659/*===========================================================================
2660 * FUNCTION   : initParameters
2661 *
2662 * DESCRIPTION: initialize camera parameters
2663 *
2664 * PARAMETERS :
2665 *
2666 * RETURN     : int32_t type of status
2667 *              NO_ERROR  -- success
2668 *              none-zero failure code
2669 *==========================================================================*/
2670int QCamera3HardwareInterface::initParameters()
2671{
2672    int rc = 0;
2673
2674    //Allocate Set Param Buffer
2675    mParamHeap = new QCamera3HeapMemory();
2676    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2677    if(rc != OK) {
2678        rc = NO_MEMORY;
2679        ALOGE("Failed to allocate SETPARM Heap memory");
2680        delete mParamHeap;
2681        mParamHeap = NULL;
2682        return rc;
2683    }
2684
2685    //Map memory for parameters buffer
2686    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2687            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2688            mParamHeap->getFd(0),
2689            sizeof(parm_buffer_t));
2690    if(rc < 0) {
2691        ALOGE("%s:failed to map SETPARM buffer",__func__);
2692        rc = FAILED_TRANSACTION;
2693        mParamHeap->deallocate();
2694        delete mParamHeap;
2695        mParamHeap = NULL;
2696        return rc;
2697    }
2698
2699    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2700    return rc;
2701}
2702
2703/*===========================================================================
2704 * FUNCTION   : deinitParameters
2705 *
2706 * DESCRIPTION: de-initialize camera parameters
2707 *
2708 * PARAMETERS :
2709 *
2710 * RETURN     : NONE
2711 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tell the backend to drop its mapping first; the heap must remain
    // valid until the unmap completes.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage; clear the now-dangling
    // pointer so stale use is an obvious NULL deref rather than corruption.
    mParameters = NULL;
}
2723
2724/*===========================================================================
2725 * FUNCTION   : calcMaxJpegSize
2726 *
2727 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2728 *
2729 * PARAMETERS :
2730 *
2731 * RETURN     : max_jpeg_size
2732 *==========================================================================*/
2733int QCamera3HardwareInterface::calcMaxJpegSize()
2734{
2735    int32_t max_jpeg_size = 0;
2736    int temp_width, temp_height;
2737    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2738        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2739        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2740        if (temp_width * temp_height > max_jpeg_size ) {
2741            max_jpeg_size = temp_width * temp_height;
2742        }
2743    }
2744    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2745    return max_jpeg_size;
2746}
2747
2748/*===========================================================================
2749 * FUNCTION   : initStaticMetadata
2750 *
2751 * DESCRIPTION: initialize the static metadata
2752 *
2753 * PARAMETERS :
2754 *   @cameraId  : camera Id
2755 *
2756 * RETURN     : int32_t type of status
2757 *              0  -- success
2758 *              non-zero failure code
2759 *==========================================================================*/
2760int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2761{
2762    int rc = 0;
2763    CameraMetadata staticInfo;
2764
2765    /* android.info: hardware level */
2766    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2767    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2768        &supportedHardwareLevel, 1);
2769
2770    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2771    /*HAL 3 only*/
2772    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2773                    &gCamCapability[cameraId]->min_focus_distance, 1);
2774
2775    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2776                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2777
2778    /*should be using focal lengths but sensor doesn't provide that info now*/
2779    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2780                      &gCamCapability[cameraId]->focal_length,
2781                      1);
2782
2783    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2784                      gCamCapability[cameraId]->apertures,
2785                      gCamCapability[cameraId]->apertures_count);
2786
2787    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2788                gCamCapability[cameraId]->filter_densities,
2789                gCamCapability[cameraId]->filter_densities_count);
2790
2791
2792    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2793                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2794                      gCamCapability[cameraId]->optical_stab_modes_count);
2795
2796    staticInfo.update(ANDROID_LENS_POSITION,
2797                      gCamCapability[cameraId]->lens_position,
2798                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2799
2800    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2801                                       gCamCapability[cameraId]->lens_shading_map_size.height};
2802    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2803                      lens_shading_map_size,
2804                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2805
2806    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2807            gCamCapability[cameraId]->sensor_physical_size, 2);
2808
2809    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2810            gCamCapability[cameraId]->exposure_time_range, 2);
2811
2812    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2813            &gCamCapability[cameraId]->max_frame_duration, 1);
2814
2815    camera_metadata_rational baseGainFactor = {
2816            gCamCapability[cameraId]->base_gain_factor.numerator,
2817            gCamCapability[cameraId]->base_gain_factor.denominator};
2818    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
2819                      &baseGainFactor, 1);
2820
2821    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2822                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2823
2824    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2825                                  gCamCapability[cameraId]->pixel_array_size.height};
2826    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2827                      pixel_array_size, 2);
2828
2829    int32_t active_array_size[] = {0, 0,
2830                                                gCamCapability[cameraId]->active_array_size.width,
2831                                                gCamCapability[cameraId]->active_array_size.height};
2832    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2833                      active_array_size, 4);
2834
2835    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2836            &gCamCapability[cameraId]->white_level, 1);
2837
2838    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2839            gCamCapability[cameraId]->black_level_pattern, 4);
2840
2841    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2842                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2843
2844    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2845                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2846
2847    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
2848    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2849                      (int32_t*)&maxFaces, 1);
2850
2851    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2852                      &gCamCapability[cameraId]->histogram_size, 1);
2853
2854    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2855            &gCamCapability[cameraId]->max_histogram_count, 1);
2856
2857    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2858                                    gCamCapability[cameraId]->sharpness_map_size.height};
2859
2860    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2861            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2862
2863    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2864            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2865
2866
2867    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2868                      &gCamCapability[cameraId]->raw_min_duration,
2869                       1);
2870
2871    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2872                                HAL_PIXEL_FORMAT_BLOB,
2873                           HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
2874    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2875    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2876                      scalar_formats,
2877                      scalar_formats_count);
2878
2879    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2880    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2881              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2882              available_processed_sizes);
2883    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2884                available_processed_sizes,
2885                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2886
2887    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2888                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2889                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2890
2891    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2892    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2893                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2894                 available_fps_ranges);
2895    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2896            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2897
2898    camera_metadata_rational exposureCompensationStep = {
2899            gCamCapability[cameraId]->exp_compensation_step.numerator,
2900            gCamCapability[cameraId]->exp_compensation_step.denominator};
2901    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2902                      &exposureCompensationStep, 1);
2903
2904    /*TO DO*/
2905    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2906    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2907                      availableVstabModes, sizeof(availableVstabModes));
2908
2909    /** Quirk for urgent 3A state until final interface is worked out */
2910    uint8_t usePartialResultQuirk = 1;
2911    staticInfo.update(ANDROID_QUIRKS_USE_PARTIAL_RESULT,
2912                      &usePartialResultQuirk, 1);
2913
2914    /*HAL 1 and HAL 3 common*/
2915    float maxZoom = 4;
2916    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2917            &maxZoom, 1);
2918
2919    int32_t max3aRegions[] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
2920    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2921            max3aRegions, 3);
2922
2923    uint8_t availableFaceDetectModes[] = {
2924            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2925            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2926    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2927                      availableFaceDetectModes,
2928                      sizeof(availableFaceDetectModes));
2929
2930    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2931                                           gCamCapability[cameraId]->exposure_compensation_max};
2932    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2933            exposureCompensationRange,
2934            sizeof(exposureCompensationRange)/sizeof(int32_t));
2935
2936    uint8_t lensFacing = (facingBack) ?
2937            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2938    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2939
2940    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2941                available_processed_sizes,
2942                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2943
2944    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2945                      available_thumbnail_sizes,
2946                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2947
2948    /*all sizes will be clubbed into this tag*/
2949    int32_t available_stream_configs_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt *
2950                                    sizeof(scalar_formats)/sizeof(int32_t) * 4;
2951    int32_t available_stream_configs[available_stream_configs_size];
2952    int idx = 0;
2953    for (int j = 0; j < scalar_formats_count; j++) {
2954        for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2955           available_stream_configs[idx] = scalar_formats[j];
2956           available_stream_configs[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2957           available_stream_configs[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2958           available_stream_configs[idx+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
2959           idx+=4;
2960        }
2961    }
2962
2963    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
2964                      available_stream_configs,
2965                      available_stream_configs_size);
2966
2967
2968
2969    int32_t max_jpeg_size = 0;
2970    int temp_width, temp_height;
2971    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2972        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2973        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2974        if (temp_width * temp_height > max_jpeg_size ) {
2975            max_jpeg_size = temp_width * temp_height;
2976        }
2977    }
2978    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2979    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2980                      &max_jpeg_size, 1);
2981
2982    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2983    size_t size = 0;
2984    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2985        int32_t val = lookupFwkName(EFFECT_MODES_MAP,
2986                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2987                                   gCamCapability[cameraId]->supported_effects[i]);
2988        if (val != NAME_NOT_FOUND) {
2989            avail_effects[size] = (uint8_t)val;
2990            size++;
2991        }
2992    }
2993    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2994                      avail_effects,
2995                      size);
2996
2997    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2998    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2999    int32_t supported_scene_modes_cnt = 0;
3000    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
3001        int32_t val = lookupFwkName(SCENE_MODES_MAP,
3002                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3003                                gCamCapability[cameraId]->supported_scene_modes[i]);
3004        if (val != NAME_NOT_FOUND) {
3005            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
3006            supported_indexes[supported_scene_modes_cnt] = i;
3007            supported_scene_modes_cnt++;
3008        }
3009    }
3010
3011    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3012                      avail_scene_modes,
3013                      supported_scene_modes_cnt);
3014
3015    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
3016    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
3017                      supported_scene_modes_cnt,
3018                      scene_mode_overrides,
3019                      supported_indexes,
3020                      cameraId);
3021    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
3022                      scene_mode_overrides,
3023                      supported_scene_modes_cnt*3);
3024
3025    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
3026    size = 0;
3027    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
3028        int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
3029                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
3030                                 gCamCapability[cameraId]->supported_antibandings[i]);
3031        if (val != NAME_NOT_FOUND) {
3032            avail_antibanding_modes[size] = (uint8_t)val;
3033            size++;
3034        }
3035
3036    }
3037    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3038                      avail_antibanding_modes,
3039                      size);
3040
3041    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
3042    size = 0;
3043    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
3044        int32_t val = lookupFwkName(FOCUS_MODES_MAP,
3045                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3046                                gCamCapability[cameraId]->supported_focus_modes[i]);
3047        if (val != NAME_NOT_FOUND) {
3048            avail_af_modes[size] = (uint8_t)val;
3049            size++;
3050        }
3051    }
3052    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
3053                      avail_af_modes,
3054                      size);
3055
3056    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
3057    size = 0;
3058    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
3059        int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
3060                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3061                                    gCamCapability[cameraId]->supported_white_balances[i]);
3062        if (val != NAME_NOT_FOUND) {
3063            avail_awb_modes[size] = (uint8_t)val;
3064            size++;
3065        }
3066    }
3067    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
3068                      avail_awb_modes,
3069                      size);
3070
3071    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
3072    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
3073      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
3074
3075    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
3076            available_flash_levels,
3077            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
3078
3079    uint8_t flashAvailable;
3080    if (gCamCapability[cameraId]->flash_available)
3081        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
3082    else
3083        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
3084    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
3085            &flashAvailable, 1);
3086
3087    uint8_t avail_ae_modes[5];
3088    size = 0;
3089    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
3090        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
3091        size++;
3092    }
3093    if (flashAvailable) {
3094        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
3095        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
3096        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
3097    }
3098    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
3099                      avail_ae_modes,
3100                      size);
3101
3102    int32_t sensitivity_range[2];
3103    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
3104    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
3105    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
3106                      sensitivity_range,
3107                      sizeof(sensitivity_range) / sizeof(int32_t));
3108
3109    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3110                      &gCamCapability[cameraId]->max_analog_sensitivity,
3111                      1);
3112
3113    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
3114                      &gCamCapability[cameraId]->jpeg_min_duration[0],
3115                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
3116
3117    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
3118    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
3119                      &sensor_orientation,
3120                      1);
3121
3122    int32_t max_output_streams[3] = {1, 3, 1};
3123    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
3124                      max_output_streams,
3125                      3);
3126
3127    uint8_t avail_leds = 0;
3128    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
3129                      &avail_leds, 0);
3130
3131    uint8_t focus_dist_calibrated;
3132    int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
3133            sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
3134            gCamCapability[cameraId]->focus_dist_calibrated);
3135    if (val != NAME_NOT_FOUND) {
3136        focus_dist_calibrated = (uint8_t)val;
3137        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3138                     &focus_dist_calibrated, 1);
3139    }
3140
3141    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
3142    size = 0;
3143    for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
3144            i++) {
3145        int32_t val = lookupFwkName(TEST_PATTERN_MAP,
3146                                    sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
3147                                    gCamCapability[cameraId]->supported_test_pattern_modes[i]);
3148        if (val != NAME_NOT_FOUND) {
3149            avail_testpattern_modes[size] = val;
3150            size++;
3151        }
3152    }
3153    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3154                      avail_testpattern_modes,
3155                      size);
3156
3157    uint8_t max_pipeline_depth = kMaxInFlight;
3158    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
3159                      &max_pipeline_depth,
3160                      1);
3161
3162    int32_t partial_result_count = 2;
3163    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3164                      &partial_result_count,
3165                       1);
3166
3167    uint8_t available_capabilities[] =
3168        {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
3169         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
3170         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_GCAM};
3171    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3172                      available_capabilities,
3173                      3);
3174
3175    int32_t max_input_streams = 0;
3176    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3177                      &max_input_streams,
3178                      1);
3179
3180    int32_t io_format_map[] = {};
3181    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3182                      io_format_map, 0);
3183
3184    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
3185    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
3186                      &max_latency,
3187                      1);
3188
3189    float optical_axis_angle[2];
3190    optical_axis_angle[0] = 0; //need to verify
3191    optical_axis_angle[1] = 0; //need to verify
3192    staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
3193                      optical_axis_angle,
3194                      2);
3195
3196    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
3197    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3198                      available_hot_pixel_modes,
3199                      1);
3200
3201    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
3202                                      ANDROID_EDGE_MODE_FAST};
3203    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3204                      available_edge_modes,
3205                      2);
3206
3207    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
3208                                           ANDROID_NOISE_REDUCTION_MODE_FAST};
3209    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3210                      available_noise_red_modes,
3211                      2);
3212
3213    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
3214                                         ANDROID_TONEMAP_MODE_FAST,
3215                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
3216    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3217                      available_tonemap_modes,
3218                      3);
3219
3220    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
3221    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3222                      available_hot_pixel_map_modes,
3223                      1);
3224
3225
3226    int32_t avail_min_frame_durations_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt *
3227                                                 sizeof(scalar_formats)/sizeof(int32_t) * 4;
3228    int64_t avail_min_frame_durations[avail_min_frame_durations_size];
3229    int pos = 0;
3230    for (int j = 0; j < scalar_formats_count; j++) {
3231        for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3232           avail_min_frame_durations[pos]   = scalar_formats[j];
3233           avail_min_frame_durations[pos+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3234           avail_min_frame_durations[pos+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3235           avail_min_frame_durations[pos+3] = gCamCapability[cameraId]->jpeg_min_duration[i];
3236           pos+=4;
3237        }
3238    }
3239    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
3240                      avail_min_frame_durations,
3241                      avail_min_frame_durations_size);
3242
3243    int32_t available_request_keys[] = {ANDROID_COLOR_CORRECTION_MODE,
3244       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
3245       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3246       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
3247       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3248       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
3249       ANDROID_CONTROL_AF_REGIONS, ANDROID_CONTROL_AF_TRIGGER,
3250       ANDROID_CONTROL_AWB_LOCK, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3251       ANDROID_CONTROL_CAPTURE_INTENT, ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
3252       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
3253       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
3254       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3255       ANDROID_JPEG_GPS_COORDINATES,
3256       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
3257       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
3258       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
3259       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3260       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
3261       ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
3262       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
3263       ANDROID_SENSOR_FRAME_DURATION,
3264       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
3265       ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
3266       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3267       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
3268       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3269       ANDROID_BLACK_LEVEL_LOCK };
3270    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
3271                      available_request_keys,
3272                      sizeof(available_request_keys)/sizeof(int32_t));
3273
3274    int32_t available_result_keys[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
3275       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
3276       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AF_REGIONS,
3277       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3278       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
3279       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3280       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
3281       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
3282       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
3283       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3284       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
3285       ANDROID_NOISE_REDUCTION_MODE, ANDROID_QUIRKS_PARTIAL_RESULT, ANDROID_REQUEST_ID,
3286       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
3287       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_FORWARD_MATRIX,
3288       ANDROID_SENSOR_COLOR_TRANSFORM, ANDROID_SENSOR_CALIBRATION_TRANSFORM,
3289       ANDROID_SENSOR_SENSITIVITY, ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
3290       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
3291       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3292       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
3293       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3294       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
3295       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
3296       ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
3297       ANDROID_STATISTICS_FACE_SCORES};
3298    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3299                      available_result_keys,
3300                      sizeof(available_result_keys)/sizeof(int32_t));
3301
3302    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3303       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3304       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
3305       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
3306       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3307       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3308       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
3309       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
3310       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3311       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
3312       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
3313       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
3314       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
3315       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
3316       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3317       ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
3318       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3319       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3320       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3321       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3322       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3323       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3324       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3325       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
3326       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
3327       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
3328       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
3329       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
3330       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
3331       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3332       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3333       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3334       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
3335       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
3336       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
3337       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3338       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3339       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3340       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3341       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3342       ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
3343    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
3344                      available_characteristics_keys,
3345                      sizeof(available_characteristics_keys)/sizeof(int32_t));
3346
3347    gStaticMetadata[cameraId] = staticInfo.release();
3348    return rc;
3349}
3350
3351/*===========================================================================
3352 * FUNCTION   : makeTable
3353 *
3354 * DESCRIPTION: make a table of sizes
3355 *
3356 * PARAMETERS :
3357 *
3358 *
3359 *==========================================================================*/
3360void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
3361                                          int32_t* sizeTable)
3362{
3363    int j = 0;
3364    for (int i = 0; i < size; i++) {
3365        sizeTable[j] = dimTable[i].width;
3366        sizeTable[j+1] = dimTable[i].height;
3367        j+=2;
3368    }
3369}
3370
3371/*===========================================================================
3372 * FUNCTION   : makeFPSTable
3373 *
3374 * DESCRIPTION: make a table of fps ranges
3375 *
3376 * PARAMETERS :
3377 *
3378 *==========================================================================*/
3379void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
3380                                          int32_t* fpsRangesTable)
3381{
3382    int j = 0;
3383    for (int i = 0; i < size; i++) {
3384        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
3385        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
3386        j+=2;
3387    }
3388}
3389
3390/*===========================================================================
3391 * FUNCTION   : makeOverridesList
3392 *
3393 * DESCRIPTION: make a list of scene mode overrides
3394 *
3395 * PARAMETERS :
3396 *
3397 *
3398 *==========================================================================*/
3399void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
3400                                                  uint8_t size, uint8_t* overridesList,
3401                                                  uint8_t* supported_indexes,
3402                                                  int camera_id)
3403{
3404    /*daemon will give a list of overrides for all scene modes.
3405      However we should send the fwk only the overrides for the scene modes
3406      supported by the framework*/
3407    int j = 0, index = 0, supt = 0;
3408    uint8_t focus_override;
3409    for (int i = 0; i < size; i++) {
3410        supt = 0;
3411        index = supported_indexes[i];
3412        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
3413        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
3414                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3415                                                    overridesTable[index].awb_mode);
3416        focus_override = (uint8_t)overridesTable[index].af_mode;
3417        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
3418           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
3419              supt = 1;
3420              break;
3421           }
3422        }
3423        if (supt) {
3424           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
3425                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3426                                              focus_override);
3427        } else {
3428           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
3429        }
3430        j+=3;
3431    }
3432}
3433
3434/*===========================================================================
3435 * FUNCTION   : getPreviewHalPixelFormat
3436 *
3437 * DESCRIPTION: convert the format to type recognized by framework
3438 *
3439 * PARAMETERS : format : the format from backend
3440 *
3441 ** RETURN    : format recognized by framework
3442 *
3443 *==========================================================================*/
3444int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
3445{
3446    int32_t halPixelFormat;
3447
3448    switch (format) {
3449    case CAM_FORMAT_YUV_420_NV12:
3450        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
3451        break;
3452    case CAM_FORMAT_YUV_420_NV21:
3453        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3454        break;
3455    case CAM_FORMAT_YUV_420_NV21_ADRENO:
3456        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
3457        break;
3458    case CAM_FORMAT_YUV_420_YV12:
3459        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
3460        break;
3461    case CAM_FORMAT_YUV_422_NV16:
3462    case CAM_FORMAT_YUV_422_NV61:
3463    default:
3464        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3465        break;
3466    }
3467    return halPixelFormat;
3468}
3469
3470/*===========================================================================
3471 * FUNCTION   : getSensorSensitivity
3472 *
3473 * DESCRIPTION: convert iso_mode to an integer value
3474 *
3475 * PARAMETERS : iso_mode : the iso_mode supported by sensor
3476 *
3477 ** RETURN    : sensitivity supported by sensor
3478 *
3479 *==========================================================================*/
3480int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
3481{
3482    int32_t sensitivity;
3483
3484    switch (iso_mode) {
3485    case CAM_ISO_MODE_100:
3486        sensitivity = 100;
3487        break;
3488    case CAM_ISO_MODE_200:
3489        sensitivity = 200;
3490        break;
3491    case CAM_ISO_MODE_400:
3492        sensitivity = 400;
3493        break;
3494    case CAM_ISO_MODE_800:
3495        sensitivity = 800;
3496        break;
3497    case CAM_ISO_MODE_1600:
3498        sensitivity = 1600;
3499        break;
3500    default:
3501        sensitivity = -1;
3502        break;
3503    }
3504    return sensitivity;
3505}
3506
3507
3508/*===========================================================================
3509 * FUNCTION   : AddSetParmEntryToBatch
3510 *
3511 * DESCRIPTION: add set parameter entry into batch
3512 *
3513 * PARAMETERS :
3514 *   @p_table     : ptr to parameter buffer
3515 *   @paramType   : parameter type
3516 *   @paramLength : length of parameter value
3517 *   @paramValue  : ptr to parameter value
3518 *
3519 * RETURN     : int32_t type of status
3520 *              NO_ERROR  -- success
3521 *              none-zero failure code
3522 *==========================================================================*/
3523int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
3524                                                          cam_intf_parm_type_t paramType,
3525                                                          uint32_t paramLength,
3526                                                          void *paramValue)
3527{
3528    int position = paramType;
3529    int current, next;
3530
3531    /*************************************************************************
3532    *                 Code to take care of linking next flags                *
3533    *************************************************************************/
3534    current = GET_FIRST_PARAM_ID(p_table);
3535    if (position == current){
3536        //DO NOTHING
3537    } else if (position < current){
3538        SET_NEXT_PARAM_ID(position, p_table, current);
3539        SET_FIRST_PARAM_ID(p_table, position);
3540    } else {
3541        /* Search for the position in the linked list where we need to slot in*/
3542        while (position > GET_NEXT_PARAM_ID(current, p_table))
3543            current = GET_NEXT_PARAM_ID(current, p_table);
3544
3545        /*If node already exists no need to alter linking*/
3546        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
3547            next = GET_NEXT_PARAM_ID(current, p_table);
3548            SET_NEXT_PARAM_ID(current, p_table, position);
3549            SET_NEXT_PARAM_ID(position, p_table, next);
3550        }
3551    }
3552
3553    /*************************************************************************
3554    *                   Copy contents into entry                             *
3555    *************************************************************************/
3556
3557    if (paramLength > sizeof(parm_type_t)) {
3558        ALOGE("%s:Size of input larger than max entry size",__func__);
3559        return BAD_VALUE;
3560    }
3561    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
3562    return NO_ERROR;
3563}
3564
3565/*===========================================================================
3566 * FUNCTION   : lookupFwkName
3567 *
3568 * DESCRIPTION: In case the enum is not same in fwk and backend
3569 *              make sure the parameter is correctly propogated
3570 *
3571 * PARAMETERS  :
3572 *   @arr      : map between the two enums
3573 *   @len      : len of the map
3574 *   @hal_name : name of the hal_parm to map
3575 *
3576 * RETURN     : int type of status
3577 *              fwk_name  -- success
3578 *              none-zero failure code
3579 *==========================================================================*/
3580int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
3581                                             int len, int hal_name)
3582{
3583
3584    for (int i = 0; i < len; i++) {
3585        if (arr[i].hal_name == hal_name)
3586            return arr[i].fwk_name;
3587    }
3588
3589    /* Not able to find matching framework type is not necessarily
3590     * an error case. This happens when mm-camera supports more attributes
3591     * than the frameworks do */
3592    ALOGD("%s: Cannot find matching framework type", __func__);
3593    return NAME_NOT_FOUND;
3594}
3595
3596/*===========================================================================
3597 * FUNCTION   : lookupHalName
3598 *
3599 * DESCRIPTION: In case the enum is not same in fwk and backend
3600 *              make sure the parameter is correctly propogated
3601 *
3602 * PARAMETERS  :
3603 *   @arr      : map between the two enums
3604 *   @len      : len of the map
3605 *   @fwk_name : name of the hal_parm to map
3606 *
3607 * RETURN     : int32_t type of status
3608 *              hal_name  -- success
3609 *              none-zero failure code
3610 *==========================================================================*/
3611int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
3612                                             int len, unsigned int fwk_name)
3613{
3614    for (int i = 0; i < len; i++) {
3615       if (arr[i].fwk_name == fwk_name)
3616           return arr[i].hal_name;
3617    }
3618    ALOGE("%s: Cannot find matching hal type", __func__);
3619    return NAME_NOT_FOUND;
3620}
3621
3622/*===========================================================================
3623 * FUNCTION   : getCapabilities
3624 *
3625 * DESCRIPTION: query camera capabilities
3626 *
3627 * PARAMETERS :
3628 *   @cameraId  : camera Id
3629 *   @info      : camera info struct to be filled in with camera capabilities
3630 *
3631 * RETURN     : int32_t type of status
3632 *              NO_ERROR  -- success
3633 *              none-zero failure code
3634 *==========================================================================*/
3635int QCamera3HardwareInterface::getCamInfo(int cameraId,
3636                                    struct camera_info *info)
3637{
3638    int rc = 0;
3639
3640    if (NULL == gCamCapability[cameraId]) {
3641        rc = initCapabilities(cameraId);
3642        if (rc < 0) {
3643            //pthread_mutex_unlock(&g_camlock);
3644            return rc;
3645        }
3646    }
3647
3648    if (NULL == gStaticMetadata[cameraId]) {
3649        rc = initStaticMetadata(cameraId);
3650        if (rc < 0) {
3651            return rc;
3652        }
3653    }
3654
3655    switch(gCamCapability[cameraId]->position) {
3656    case CAM_POSITION_BACK:
3657        info->facing = CAMERA_FACING_BACK;
3658        break;
3659
3660    case CAM_POSITION_FRONT:
3661        info->facing = CAMERA_FACING_FRONT;
3662        break;
3663
3664    default:
3665        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
3666        rc = -1;
3667        break;
3668    }
3669
3670
3671    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
3672    info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
3673    info->static_camera_characteristics = gStaticMetadata[cameraId];
3674
3675    return rc;
3676}
3677
3678/*===========================================================================
3679 * FUNCTION   : translateMetadata
3680 *
3681 * DESCRIPTION: translate the metadata into camera_metadata_t
3682 *
3683 * PARAMETERS : type of the request
3684 *
3685 *
3686 * RETURN     : success: camera_metadata_t*
3687 *              failure: NULL
3688 *
3689 *==========================================================================*/
3690camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
3691{
3692    pthread_mutex_lock(&mMutex);
3693
3694    if (mDefaultMetadata[type] != NULL) {
3695        pthread_mutex_unlock(&mMutex);
3696        return mDefaultMetadata[type];
3697    }
3698    //first time we are handling this request
3699    //fill up the metadata structure using the wrapper class
3700    CameraMetadata settings;
3701    //translate from cam_capability_t to camera_metadata_tag_t
3702    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
3703    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
3704    int32_t defaultRequestID = 0;
3705    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
3706
3707    uint8_t controlIntent = 0;
3708    uint8_t focusMode;
3709    switch (type) {
3710      case CAMERA3_TEMPLATE_PREVIEW:
3711        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
3712        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3713        break;
3714      case CAMERA3_TEMPLATE_STILL_CAPTURE:
3715        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
3716        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3717        break;
3718      case CAMERA3_TEMPLATE_VIDEO_RECORD:
3719        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
3720        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
3721        break;
3722      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
3723        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
3724        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
3725        break;
3726      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
3727        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
3728        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3729        break;
3730      default:
3731        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
3732        break;
3733    }
3734    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
3735
3736    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
3737        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3738    }
3739    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
3740
3741    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3742            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
3743
3744    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
3745    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3746
3747    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
3748    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
3749
3750    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
3751    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
3752
3753    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
3754    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
3755
3756    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
3757    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
3758
3759    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
3760    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
3761
3762    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
3763    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
3764
3765    /*flash*/
3766    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3767    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3768
3769    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3770    settings.update(ANDROID_FLASH_FIRING_POWER,
3771            &flashFiringLevel, 1);
3772
3773    /* lens */
3774    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3775    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3776
3777    if (gCamCapability[mCameraId]->filter_densities_count) {
3778        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3779        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3780                        gCamCapability[mCameraId]->filter_densities_count);
3781    }
3782
3783    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3784    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3785
3786    /* Exposure time(Update the Min Exposure Time)*/
3787    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
3788    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
3789
3790    /* frame duration */
3791    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
3792    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
3793
3794    /* sensitivity */
3795    static const int32_t default_sensitivity = 100;
3796    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
3797
3798    /*edge mode*/
3799    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
3800    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
3801
3802    /*noise reduction mode*/
3803    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3804    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3805
3806    /*color correction mode*/
3807    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3808    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3809
3810    /*transform matrix mode*/
3811    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3812    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3813
3814    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3815    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3816
3817    int32_t scaler_crop_region[4];
3818    scaler_crop_region[0] = 0;
3819    scaler_crop_region[1] = 0;
3820    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
3821    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
3822    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
3823
3824    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
3825    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
3826
3827    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3828    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
3829
3830    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
3831                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
3832                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
3833    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
3834
3835    /*focus distance*/
3836    float focus_distance = 0.0;
3837    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
3838
3839    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
3840    float max_range = 0.0;
3841    float max_fixed_fps = 0.0;
3842    int32_t fps_range[2] = {0, 0};
3843    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
3844            i++) {
3845        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
3846            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
3847        if (type == CAMERA3_TEMPLATE_PREVIEW ||
3848                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
3849                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
3850            if (range > max_range) {
3851                fps_range[0] =
3852                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
3853                fps_range[1] =
3854                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
3855                max_range = range;
3856            }
3857        } else {
3858            if (range < 0.01 && max_fixed_fps <
3859                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
3860                fps_range[0] =
3861                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
3862                fps_range[1] =
3863                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
3864                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
3865            }
3866        }
3867    }
3868    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
3869
3870    /*precapture trigger*/
3871    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
3872    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
3873
3874    /*af trigger*/
3875    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
3876    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
3877
3878    /* ae & af regions */
3879    int32_t active_region[] = {
3880            gCamCapability[mCameraId]->active_array_size.left,
3881            gCamCapability[mCameraId]->active_array_size.top,
3882            gCamCapability[mCameraId]->active_array_size.left +
3883                    gCamCapability[mCameraId]->active_array_size.width,
3884            gCamCapability[mCameraId]->active_array_size.top +
3885                    gCamCapability[mCameraId]->active_array_size.height,
3886            1};
3887    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
3888    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
3889
3890    /* black level lock */
3891    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
3892    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
3893
3894    /* face detect mode */
3895    uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
3896    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
3897
3898    /* lens shading map mode */
3899    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
3900    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
3901
3902    mDefaultMetadata[type] = settings.release();
3903
3904    pthread_mutex_unlock(&mMutex);
3905    return mDefaultMetadata[type];
3906}
3907
3908/*===========================================================================
3909 * FUNCTION   : setFrameParameters
3910 *
3911 * DESCRIPTION: set parameters per frame as requested in the metadata from
3912 *              framework
3913 *
3914 * PARAMETERS :
3915 *   @request   : request that needs to be serviced
3916 *   @streamID : Stream ID of all the requested streams
3917 *
3918 * RETURN     : success: NO_ERROR
3919 *              failure:
3920 *==========================================================================*/
3921int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3922                    cam_stream_ID_t streamID)
3923{
3924    /*translate from camera_metadata_t type to parm_type_t*/
3925    int rc = 0;
3926    if (request->settings == NULL && mFirstRequest) {
3927        /*settings cannot be null for the first request*/
3928        return BAD_VALUE;
3929    }
3930
3931    int32_t hal_version = CAM_HAL_V3;
3932
3933    memset(mParameters, 0, sizeof(parm_buffer_t));
3934    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3935    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3936                sizeof(hal_version), &hal_version);
3937    if (rc < 0) {
3938        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3939        return BAD_VALUE;
3940    }
3941
3942    /*we need to update the frame number in the parameters*/
3943    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3944                                sizeof(request->frame_number), &(request->frame_number));
3945    if (rc < 0) {
3946        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3947        return BAD_VALUE;
3948    }
3949
3950    /* Update stream id of all the requested buffers */
3951    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
3952                                sizeof(cam_stream_ID_t), &streamID);
3953
3954    if (rc < 0) {
3955        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3956        return BAD_VALUE;
3957    }
3958
3959    if(request->settings != NULL){
3960        rc = translateMetadataToParameters(request);
3961    }
3962
3963    /*set the parameters to backend*/
3964    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3965    return rc;
3966}
3967
3968/*===========================================================================
3969 * FUNCTION   : translateMetadataToParameters
3970 *
3971 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3972 *
3973 *
3974 * PARAMETERS :
3975 *   @request  : request sent from framework
3976 *
3977 *
3978 * RETURN     : success: NO_ERROR
3979 *              failure:
3980 *==========================================================================*/
3981int QCamera3HardwareInterface::translateMetadataToParameters
3982                                  (const camera3_capture_request_t *request)
3983{
3984    int rc = 0;
3985    CameraMetadata frame_settings;
3986    frame_settings = request->settings;
3987
3988    /* Do not change the order of the following list unless you know what you are
3989     * doing.
3990     * The order is laid out in such a way that parameters in the front of the table
3991     * may be used to override the parameters later in the table. Examples are:
3992     * 1. META_MODE should precede AEC/AWB/AF MODE
3993     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3994     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3995     * 4. Any mode should precede it's corresponding settings
3996     */
3997    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3998        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3999        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
4000                sizeof(metaMode), &metaMode);
4001        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4002           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4003           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
4004                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
4005                                             fwk_sceneMode);
4006           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
4007                sizeof(sceneMode), &sceneMode);
4008        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
4009           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4010           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
4011                sizeof(sceneMode), &sceneMode);
4012        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
4013           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4014           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
4015                sizeof(sceneMode), &sceneMode);
4016        }
4017    }
4018
4019    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4020        uint8_t fwk_aeMode =
4021            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4022        uint8_t aeMode;
4023        int32_t redeye;
4024
4025        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
4026            aeMode = CAM_AE_MODE_OFF;
4027        } else {
4028            aeMode = CAM_AE_MODE_ON;
4029        }
4030        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
4031            redeye = 1;
4032        } else {
4033            redeye = 0;
4034        }
4035
4036        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
4037                                          sizeof(AE_FLASH_MODE_MAP),
4038                                          fwk_aeMode);
4039        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
4040                sizeof(aeMode), &aeMode);
4041        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
4042                sizeof(flashMode), &flashMode);
4043        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
4044                sizeof(redeye), &redeye);
4045    }
4046
4047    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
4048        uint8_t fwk_whiteLevel =
4049            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
4050        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
4051                sizeof(WHITE_BALANCE_MODES_MAP),
4052                fwk_whiteLevel);
4053        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
4054                sizeof(whiteLevel), &whiteLevel);
4055    }
4056
4057    float focalDistance = -1.0;
4058    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
4059        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
4060        rc = AddSetParmEntryToBatch(mParameters,
4061                CAM_INTF_META_LENS_FOCUS_DISTANCE,
4062                sizeof(focalDistance), &focalDistance);
4063    }
4064
4065    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
4066        uint8_t fwk_focusMode =
4067            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
4068        uint8_t focusMode;
4069        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
4070            focusMode = CAM_FOCUS_MODE_INFINITY;
4071        } else{
4072         focusMode = lookupHalName(FOCUS_MODES_MAP,
4073                                   sizeof(FOCUS_MODES_MAP),
4074                                   fwk_focusMode);
4075        }
4076        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
4077                sizeof(focusMode), &focusMode);
4078    }
4079
4080    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
4081        int32_t antibandingMode =
4082            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
4083        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
4084                sizeof(antibandingMode), &antibandingMode);
4085    }
4086
4087    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4088        int32_t expCompensation = frame_settings.find(
4089            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4090        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4091            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4092        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4093            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4094        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
4095          sizeof(expCompensation), &expCompensation);
4096    }
4097
4098    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
4099        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
4100        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
4101                sizeof(aeLock), &aeLock);
4102    }
4103    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4104        cam_fps_range_t fps_range;
4105        fps_range.min_fps =
4106            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
4107        fps_range.max_fps =
4108            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
4109        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
4110                sizeof(fps_range), &fps_range);
4111    }
4112
4113    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
4114        uint8_t awbLock =
4115            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
4116        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
4117                sizeof(awbLock), &awbLock);
4118    }
4119
4120    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
4121        uint8_t fwk_effectMode =
4122            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
4123        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
4124                sizeof(EFFECT_MODES_MAP),
4125                fwk_effectMode);
4126        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
4127                sizeof(effectMode), &effectMode);
4128    }
4129
4130    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
4131        uint8_t colorCorrectMode =
4132            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
4133        rc =
4134            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
4135                    sizeof(colorCorrectMode), &colorCorrectMode);
4136    }
4137
4138    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
4139        cam_color_correct_gains_t colorCorrectGains;
4140        for (int i = 0; i < 4; i++) {
4141            colorCorrectGains.gains[i] =
4142                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
4143        }
4144        rc =
4145            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
4146                    sizeof(colorCorrectGains), &colorCorrectGains);
4147    }
4148
4149    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
4150        cam_color_correct_matrix_t colorCorrectTransform;
4151        cam_rational_type_t transform_elem;
4152        int num = 0;
4153        for (int i = 0; i < 3; i++) {
4154           for (int j = 0; j < 3; j++) {
4155              transform_elem.numerator =
4156                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
4157              transform_elem.denominator =
4158                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
4159              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
4160              num++;
4161           }
4162        }
4163        rc =
4164            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
4165                    sizeof(colorCorrectTransform), &colorCorrectTransform);
4166    }
4167
4168    cam_trigger_t aecTrigger;
4169    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
4170    aecTrigger.trigger_id = -1;
4171    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
4172        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
4173        aecTrigger.trigger =
4174            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
4175        aecTrigger.trigger_id =
4176            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
4177    }
4178    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
4179                                sizeof(aecTrigger), &aecTrigger);
4180
4181    /*af_trigger must come with a trigger id*/
4182    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
4183        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
4184        cam_trigger_t af_trigger;
4185        af_trigger.trigger =
4186            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
4187        af_trigger.trigger_id =
4188            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
4189        rc = AddSetParmEntryToBatch(mParameters,
4190                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
4191    }
4192
4193    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
4194        int32_t demosaic =
4195            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
4196        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
4197                sizeof(demosaic), &demosaic);
4198    }
4199
4200    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
4201        cam_edge_application_t edge_application;
4202        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
4203        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
4204            edge_application.sharpness = 0;
4205        } else {
4206            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
4207                uint8_t edgeStrength =
4208                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
4209                edge_application.sharpness = (int32_t)edgeStrength;
4210            } else {
4211                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
4212            }
4213        }
4214        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
4215                sizeof(edge_application), &edge_application);
4216    }
4217
4218    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
4219        int32_t respectFlashMode = 1;
4220        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4221            uint8_t fwk_aeMode =
4222                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4223            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
4224                respectFlashMode = 0;
4225                ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
4226                    __func__);
4227            }
4228        }
4229        if (respectFlashMode) {
4230            uint8_t flashMode =
4231                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
4232            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
4233                                          sizeof(FLASH_MODES_MAP),
4234                                          flashMode);
4235            ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
4236            // To check: CAM_INTF_META_FLASH_MODE usage
4237            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
4238                          sizeof(flashMode), &flashMode);
4239        }
4240    }
4241
4242    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
4243        uint8_t flashPower =
4244            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
4245        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
4246                sizeof(flashPower), &flashPower);
4247    }
4248
4249    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
4250        int64_t flashFiringTime =
4251            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
4252        rc = AddSetParmEntryToBatch(mParameters,
4253                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
4254    }
4255
4256    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
4257        uint8_t hotPixelMode =
4258            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
4259        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
4260                sizeof(hotPixelMode), &hotPixelMode);
4261    }
4262
4263    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
4264        float lensAperture =
4265            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
4266        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
4267                sizeof(lensAperture), &lensAperture);
4268    }
4269
4270    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
4271        float filterDensity =
4272            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
4273        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
4274                sizeof(filterDensity), &filterDensity);
4275    }
4276
4277    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
4278        float focalLength =
4279            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
4280        rc = AddSetParmEntryToBatch(mParameters,
4281                CAM_INTF_META_LENS_FOCAL_LENGTH,
4282                sizeof(focalLength), &focalLength);
4283    }
4284
4285    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
4286        uint8_t optStabMode =
4287            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
4288        rc = AddSetParmEntryToBatch(mParameters,
4289                CAM_INTF_META_LENS_OPT_STAB_MODE,
4290                sizeof(optStabMode), &optStabMode);
4291    }
4292
4293    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4294        uint8_t noiseRedMode =
4295            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4296        rc = AddSetParmEntryToBatch(mParameters,
4297                CAM_INTF_META_NOISE_REDUCTION_MODE,
4298                sizeof(noiseRedMode), &noiseRedMode);
4299    }
4300
4301    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
4302        uint8_t noiseRedStrength =
4303            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
4304        rc = AddSetParmEntryToBatch(mParameters,
4305                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
4306                sizeof(noiseRedStrength), &noiseRedStrength);
4307    }
4308
4309    cam_crop_region_t scalerCropRegion;
4310    bool scalerCropSet = false;
4311    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
4312        scalerCropRegion.left =
4313            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
4314        scalerCropRegion.top =
4315            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
4316        scalerCropRegion.width =
4317            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
4318        scalerCropRegion.height =
4319            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
4320        rc = AddSetParmEntryToBatch(mParameters,
4321                CAM_INTF_META_SCALER_CROP_REGION,
4322                sizeof(scalerCropRegion), &scalerCropRegion);
4323        scalerCropSet = true;
4324    }
4325
4326    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
4327        int64_t sensorExpTime =
4328            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
4329        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
4330        rc = AddSetParmEntryToBatch(mParameters,
4331                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
4332                sizeof(sensorExpTime), &sensorExpTime);
4333    }
4334
4335    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
4336        int64_t sensorFrameDuration =
4337            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
4338        int64_t minFrameDuration = getMinFrameDuration(request);
4339        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
4340        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
4341            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
4342        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
4343        rc = AddSetParmEntryToBatch(mParameters,
4344                CAM_INTF_META_SENSOR_FRAME_DURATION,
4345                sizeof(sensorFrameDuration), &sensorFrameDuration);
4346    }
4347
4348    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
4349        int32_t sensorSensitivity =
4350            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
4351        if (sensorSensitivity <
4352                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
4353            sensorSensitivity =
4354                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
4355        if (sensorSensitivity >
4356                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
4357            sensorSensitivity =
4358                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
4359        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
4360        rc = AddSetParmEntryToBatch(mParameters,
4361                CAM_INTF_META_SENSOR_SENSITIVITY,
4362                sizeof(sensorSensitivity), &sensorSensitivity);
4363    }
4364
4365    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
4366        int32_t shadingMode =
4367            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
4368        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
4369                sizeof(shadingMode), &shadingMode);
4370    }
4371
4372    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
4373        uint8_t shadingStrength =
4374            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
4375        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
4376                sizeof(shadingStrength), &shadingStrength);
4377    }
4378
4379    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
4380        uint8_t fwk_facedetectMode =
4381            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
4382        uint8_t facedetectMode =
4383            lookupHalName(FACEDETECT_MODES_MAP,
4384                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
4385        rc = AddSetParmEntryToBatch(mParameters,
4386                CAM_INTF_META_STATS_FACEDETECT_MODE,
4387                sizeof(facedetectMode), &facedetectMode);
4388    }
4389
4390    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
4391        uint8_t histogramMode =
4392            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
4393        rc = AddSetParmEntryToBatch(mParameters,
4394                CAM_INTF_META_STATS_HISTOGRAM_MODE,
4395                sizeof(histogramMode), &histogramMode);
4396    }
4397
4398    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
4399        uint8_t sharpnessMapMode =
4400            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
4401        rc = AddSetParmEntryToBatch(mParameters,
4402                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
4403                sizeof(sharpnessMapMode), &sharpnessMapMode);
4404    }
4405
4406    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
4407        uint8_t tonemapMode =
4408            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
4409        rc = AddSetParmEntryToBatch(mParameters,
4410                CAM_INTF_META_TONEMAP_MODE,
4411                sizeof(tonemapMode), &tonemapMode);
4412    }
4413    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
4414    /*All tonemap channels will have the same number of points*/
4415    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
4416        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
4417        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
4418        cam_rgb_tonemap_curves tonemapCurves;
4419        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
4420
4421        /* ch0 = G*/
4422        int point = 0;
4423        cam_tonemap_curve_t tonemapCurveGreen;
4424        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
4425            for (int j = 0; j < 2; j++) {
4426               tonemapCurveGreen.tonemap_points[i][j] =
4427                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
4428               point++;
4429            }
4430        }
4431        tonemapCurves.curves[0] = tonemapCurveGreen;
4432
4433        /* ch 1 = B */
4434        point = 0;
4435        cam_tonemap_curve_t tonemapCurveBlue;
4436        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
4437            for (int j = 0; j < 2; j++) {
4438               tonemapCurveBlue.tonemap_points[i][j] =
4439                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
4440               point++;
4441            }
4442        }
4443        tonemapCurves.curves[1] = tonemapCurveBlue;
4444
4445        /* ch 2 = R */
4446        point = 0;
4447        cam_tonemap_curve_t tonemapCurveRed;
4448        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
4449            for (int j = 0; j < 2; j++) {
4450               tonemapCurveRed.tonemap_points[i][j] =
4451                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
4452               point++;
4453            }
4454        }
4455        tonemapCurves.curves[2] = tonemapCurveRed;
4456
4457        rc = AddSetParmEntryToBatch(mParameters,
4458                CAM_INTF_META_TONEMAP_CURVES,
4459                sizeof(tonemapCurves), &tonemapCurves);
4460    }
4461
4462    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4463        uint8_t captureIntent =
4464            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4465        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
4466                sizeof(captureIntent), &captureIntent);
4467    }
4468
4469    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
4470        uint8_t blackLevelLock =
4471            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
4472        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
4473                sizeof(blackLevelLock), &blackLevelLock);
4474    }
4475
4476    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
4477        uint8_t lensShadingMapMode =
4478            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
4479        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
4480                sizeof(lensShadingMapMode), &lensShadingMapMode);
4481    }
4482
4483    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
4484        cam_area_t roi;
4485        bool reset = true;
4486        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
4487        if (scalerCropSet) {
4488            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4489        }
4490        if (reset) {
4491            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
4492                    sizeof(roi), &roi);
4493        }
4494    }
4495
4496    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
4497        cam_area_t roi;
4498        bool reset = true;
4499        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
4500        if (scalerCropSet) {
4501            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4502        }
4503        if (reset) {
4504            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
4505                    sizeof(roi), &roi);
4506        }
4507    }
4508
4509    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
4510        cam_area_t roi;
4511        bool reset = true;
4512        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
4513        if (scalerCropSet) {
4514            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4515        }
4516        if (reset) {
4517            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
4518                    sizeof(roi), &roi);
4519        }
4520    }
4521
4522    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
4523        cam_test_pattern_data_t testPatternData;
4524        uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
4525        uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
4526               sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
4527
4528        memset(&testPatternData, 0, sizeof(testPatternData));
4529        testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
4530        if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
4531                frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
4532            int32_t* fwk_testPatternData = frame_settings.find(
4533                    ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
4534            testPatternData.r = fwk_testPatternData[0];
4535            testPatternData.b = fwk_testPatternData[3];
4536            switch (gCamCapability[mCameraId]->color_arrangement) {
4537            case CAM_FILTER_ARRANGEMENT_RGGB:
4538            case CAM_FILTER_ARRANGEMENT_GRBG:
4539                testPatternData.gr = fwk_testPatternData[1];
4540                testPatternData.gb = fwk_testPatternData[2];
4541                break;
4542            case CAM_FILTER_ARRANGEMENT_GBRG:
4543            case CAM_FILTER_ARRANGEMENT_BGGR:
4544                testPatternData.gr = fwk_testPatternData[2];
4545                testPatternData.gb = fwk_testPatternData[1];
4546                break;
4547            default:
4548                ALOGE("%s: color arrangement %d is not supported", __func__,
4549                    gCamCapability[mCameraId]->color_arrangement);
4550                break;
4551            }
4552        }
4553        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_TEST_PATTERN_DATA,
4554            sizeof(testPatternData), &testPatternData);
4555    }
4556    return rc;
4557}
4558
4559/*===========================================================================
4560 * FUNCTION   : getJpegSettings
4561 *
4562 * DESCRIPTION: save the jpeg settings in the HAL
4563 *
4564 *
4565 * PARAMETERS :
4566 *   @settings  : frame settings information from framework
4567 *
4568 *
4569 * RETURN     : success: NO_ERROR
4570 *              failure:
4571 *==========================================================================*/
4572int QCamera3HardwareInterface::getJpegSettings
4573                                  (const camera_metadata_t *settings)
4574{
4575    if (mJpegSettings) {
4576        if (mJpegSettings->gps_timestamp) {
4577            free(mJpegSettings->gps_timestamp);
4578            mJpegSettings->gps_timestamp = NULL;
4579        }
4580        if (mJpegSettings->gps_coordinates) {
4581            for (int i = 0; i < 3; i++) {
4582                free(mJpegSettings->gps_coordinates[i]);
4583                mJpegSettings->gps_coordinates[i] = NULL;
4584            }
4585        }
4586        free(mJpegSettings);
4587        mJpegSettings = NULL;
4588    }
4589    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
4590    CameraMetadata jpeg_settings;
4591    jpeg_settings = settings;
4592
4593    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
4594        mJpegSettings->jpeg_orientation =
4595            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
4596    } else {
4597        mJpegSettings->jpeg_orientation = 0;
4598    }
4599    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
4600        mJpegSettings->jpeg_quality =
4601            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
4602    } else {
4603        mJpegSettings->jpeg_quality = 85;
4604    }
4605    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
4606        mJpegSettings->thumbnail_size.width =
4607            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
4608        mJpegSettings->thumbnail_size.height =
4609            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
4610    } else {
4611        mJpegSettings->thumbnail_size.width = 0;
4612        mJpegSettings->thumbnail_size.height = 0;
4613    }
4614    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
4615        for (int i = 0; i < 3; i++) {
4616            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
4617            *(mJpegSettings->gps_coordinates[i]) =
4618                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
4619        }
4620    } else{
4621       for (int i = 0; i < 3; i++) {
4622            mJpegSettings->gps_coordinates[i] = NULL;
4623        }
4624    }
4625
4626    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
4627        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
4628        *(mJpegSettings->gps_timestamp) =
4629            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
4630    } else {
4631        mJpegSettings->gps_timestamp = NULL;
4632    }
4633
4634    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
4635        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
4636        for (int i = 0; i < len; i++) {
4637            mJpegSettings->gps_processing_method[i] =
4638                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
4639        }
4640        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
4641            mJpegSettings->gps_processing_method[len] = '\0';
4642        }
4643    } else {
4644        mJpegSettings->gps_processing_method[0] = '\0';
4645    }
4646
4647    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
4648        mJpegSettings->sensor_sensitivity =
4649            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
4650    } else {
4651        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
4652    }
4653
4654    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
4655
4656    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
4657        mJpegSettings->lens_focal_length =
4658            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
4659    }
4660    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4661        mJpegSettings->exposure_compensation =
4662            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4663    }
4664    mJpegSettings->sharpness = 10; //default value
4665    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
4666        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
4667        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
4668            mJpegSettings->sharpness = 0;
4669        }
4670    }
4671    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
4672    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
4673    mJpegSettings->is_jpeg_format = true;
4674    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
4675    return 0;
4676}
4677
4678/*===========================================================================
4679 * FUNCTION   : captureResultCb
4680 *
4681 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
4682 *
4683 * PARAMETERS :
4684 *   @frame  : frame information from mm-camera-interface
4685 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
4686 *   @userdata: userdata
4687 *
4688 * RETURN     : NONE
4689 *==========================================================================*/
4690void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
4691                camera3_stream_buffer_t *buffer,
4692                uint32_t frame_number, void *userdata)
4693{
4694    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
4695    if (hw == NULL) {
4696        ALOGE("%s: Invalid hw %p", __func__, hw);
4697        return;
4698    }
4699
4700    hw->captureResultCb(metadata, buffer, frame_number);
4701    return;
4702}
4703
4704
4705/*===========================================================================
4706 * FUNCTION   : initialize
4707 *
4708 * DESCRIPTION: Pass framework callback pointers to HAL
4709 *
4710 * PARAMETERS :
4711 *
4712 *
4713 * RETURN     : Success : 0
4714 *              Failure: -ENODEV
4715 *==========================================================================*/
4716
4717int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
4718                                  const camera3_callback_ops_t *callback_ops)
4719{
4720    ALOGV("%s: E", __func__);
4721    QCamera3HardwareInterface *hw =
4722        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4723    if (!hw) {
4724        ALOGE("%s: NULL camera device", __func__);
4725        return -ENODEV;
4726    }
4727
4728    int rc = hw->initialize(callback_ops);
4729    ALOGV("%s: X", __func__);
4730    return rc;
4731}
4732
4733/*===========================================================================
4734 * FUNCTION   : configure_streams
4735 *
4736 * DESCRIPTION:
4737 *
4738 * PARAMETERS :
4739 *
4740 *
4741 * RETURN     : Success: 0
4742 *              Failure: -EINVAL (if stream configuration is invalid)
4743 *                       -ENODEV (fatal error)
4744 *==========================================================================*/
4745
4746int QCamera3HardwareInterface::configure_streams(
4747        const struct camera3_device *device,
4748        camera3_stream_configuration_t *stream_list)
4749{
4750    ALOGV("%s: E", __func__);
4751    QCamera3HardwareInterface *hw =
4752        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4753    if (!hw) {
4754        ALOGE("%s: NULL camera device", __func__);
4755        return -ENODEV;
4756    }
4757    int rc = hw->configureStreams(stream_list);
4758    ALOGV("%s: X", __func__);
4759    return rc;
4760}
4761
4762/*===========================================================================
4763 * FUNCTION   : register_stream_buffers
4764 *
4765 * DESCRIPTION: Register stream buffers with the device
4766 *
4767 * PARAMETERS :
4768 *
4769 * RETURN     :
4770 *==========================================================================*/
4771int QCamera3HardwareInterface::register_stream_buffers(
4772        const struct camera3_device *device,
4773        const camera3_stream_buffer_set_t *buffer_set)
4774{
4775    ALOGV("%s: E", __func__);
4776    QCamera3HardwareInterface *hw =
4777        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4778    if (!hw) {
4779        ALOGE("%s: NULL camera device", __func__);
4780        return -ENODEV;
4781    }
4782    int rc = hw->registerStreamBuffers(buffer_set);
4783    ALOGV("%s: X", __func__);
4784    return rc;
4785}
4786
4787/*===========================================================================
4788 * FUNCTION   : construct_default_request_settings
4789 *
4790 * DESCRIPTION: Configure a settings buffer to meet the required use case
4791 *
4792 * PARAMETERS :
4793 *
4794 *
4795 * RETURN     : Success: Return valid metadata
4796 *              Failure: Return NULL
4797 *==========================================================================*/
4798const camera_metadata_t* QCamera3HardwareInterface::
4799    construct_default_request_settings(const struct camera3_device *device,
4800                                        int type)
4801{
4802
4803    ALOGV("%s: E", __func__);
4804    camera_metadata_t* fwk_metadata = NULL;
4805    QCamera3HardwareInterface *hw =
4806        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4807    if (!hw) {
4808        ALOGE("%s: NULL camera device", __func__);
4809        return NULL;
4810    }
4811
4812    fwk_metadata = hw->translateCapabilityToMetadata(type);
4813
4814    ALOGV("%s: X", __func__);
4815    return fwk_metadata;
4816}
4817
4818/*===========================================================================
4819 * FUNCTION   : process_capture_request
4820 *
4821 * DESCRIPTION:
4822 *
4823 * PARAMETERS :
4824 *
4825 *
4826 * RETURN     :
4827 *==========================================================================*/
4828int QCamera3HardwareInterface::process_capture_request(
4829                    const struct camera3_device *device,
4830                    camera3_capture_request_t *request)
4831{
4832    ALOGV("%s: E", __func__);
4833    QCamera3HardwareInterface *hw =
4834        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4835    if (!hw) {
4836        ALOGE("%s: NULL camera device", __func__);
4837        return -EINVAL;
4838    }
4839
4840    int rc = hw->processCaptureRequest(request);
4841    ALOGV("%s: X", __func__);
4842    return rc;
4843}
4844
4845/*===========================================================================
4846 * FUNCTION   : get_metadata_vendor_tag_ops
4847 *
4848 * DESCRIPTION:
4849 *
4850 * PARAMETERS :
4851 *
4852 *
4853 * RETURN     :
4854 *==========================================================================*/
4855
4856void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
4857                const struct camera3_device *device,
4858                vendor_tag_query_ops_t* ops)
4859{
4860    ALOGV("%s: E", __func__);
4861    QCamera3HardwareInterface *hw =
4862        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4863    if (!hw) {
4864        ALOGE("%s: NULL camera device", __func__);
4865        return;
4866    }
4867
4868    hw->getMetadataVendorTagOps(ops);
4869    ALOGV("%s: X", __func__);
4870    return;
4871}
4872
4873/*===========================================================================
4874 * FUNCTION   : dump
4875 *
4876 * DESCRIPTION:
4877 *
4878 * PARAMETERS :
4879 *
4880 *
4881 * RETURN     :
4882 *==========================================================================*/
4883
4884void QCamera3HardwareInterface::dump(
4885                const struct camera3_device *device, int fd)
4886{
4887    ALOGV("%s: E", __func__);
4888    QCamera3HardwareInterface *hw =
4889        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4890    if (!hw) {
4891        ALOGE("%s: NULL camera device", __func__);
4892        return;
4893    }
4894
4895    hw->dump(fd);
4896    ALOGV("%s: X", __func__);
4897    return;
4898}
4899
4900/*===========================================================================
4901 * FUNCTION   : flush
4902 *
4903 * DESCRIPTION:
4904 *
4905 * PARAMETERS :
4906 *
4907 *
4908 * RETURN     :
4909 *==========================================================================*/
4910
4911int QCamera3HardwareInterface::flush(
4912                const struct camera3_device *device)
4913{
4914    int rc;
4915    ALOGV("%s: E", __func__);
4916    QCamera3HardwareInterface *hw =
4917        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4918    if (!hw) {
4919        ALOGE("%s: NULL camera device", __func__);
4920        return -EINVAL;
4921    }
4922
4923    rc = hw->flush();
4924    ALOGV("%s: X", __func__);
4925    return rc;
4926}
4927
4928/*===========================================================================
4929 * FUNCTION   : close_camera_device
4930 *
4931 * DESCRIPTION:
4932 *
4933 * PARAMETERS :
4934 *
4935 *
4936 * RETURN     :
4937 *==========================================================================*/
4938int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4939{
4940    ALOGV("%s: E", __func__);
4941    int ret = NO_ERROR;
4942    QCamera3HardwareInterface *hw =
4943        reinterpret_cast<QCamera3HardwareInterface *>(
4944            reinterpret_cast<camera3_device_t *>(device)->priv);
4945    if (!hw) {
4946        ALOGE("NULL camera device");
4947        return BAD_VALUE;
4948    }
4949    delete hw;
4950
4951    pthread_mutex_lock(&mCameraSessionLock);
4952    mCameraSessionActive = 0;
4953    pthread_mutex_unlock(&mCameraSessionLock);
4954    ALOGV("%s: X", __func__);
4955    return ret;
4956}
4957
4958/*===========================================================================
4959 * FUNCTION   : getWaveletDenoiseProcessPlate
4960 *
4961 * DESCRIPTION: query wavelet denoise process plate
4962 *
4963 * PARAMETERS : None
4964 *
4965 * RETURN     : WNR prcocess plate vlaue
4966 *==========================================================================*/
4967cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4968{
4969    char prop[PROPERTY_VALUE_MAX];
4970    memset(prop, 0, sizeof(prop));
4971    property_get("persist.denoise.process.plates", prop, "0");
4972    int processPlate = atoi(prop);
4973    switch(processPlate) {
4974    case 0:
4975        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4976    case 1:
4977        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4978    case 2:
4979        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4980    case 3:
4981        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4982    default:
4983        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4984    }
4985}
4986
4987/*===========================================================================
4988 * FUNCTION   : needRotationReprocess
4989 *
4990 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4991 *
4992 * PARAMETERS : none
4993 *
4994 * RETURN     : true: needed
4995 *              false: no need
4996 *==========================================================================*/
4997bool QCamera3HardwareInterface::needRotationReprocess()
4998{
4999
5000    if (!mJpegSettings->is_jpeg_format) {
5001        // RAW image, no need to reprocess
5002        return false;
5003    }
5004
5005    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
5006        mJpegSettings->jpeg_orientation > 0) {
5007        // current rotation is not zero, and pp has the capability to process rotation
5008        ALOGD("%s: need do reprocess for rotation", __func__);
5009        return true;
5010    }
5011
5012    return false;
5013}
5014
5015/*===========================================================================
5016 * FUNCTION   : needReprocess
5017 *
5018 * DESCRIPTION: if reprocess in needed
5019 *
5020 * PARAMETERS : none
5021 *
5022 * RETURN     : true: needed
5023 *              false: no need
5024 *==========================================================================*/
5025bool QCamera3HardwareInterface::needReprocess()
5026{
5027    if (!mJpegSettings->is_jpeg_format) {
5028        // RAW image, no need to reprocess
5029        return false;
5030    }
5031
5032    if ((mJpegSettings->min_required_pp_mask > 0) ||
5033         isWNREnabled()) {
5034        // TODO: add for ZSL HDR later
5035        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
5036        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
5037        return true;
5038    }
5039    return needRotationReprocess();
5040}
5041
5042/*===========================================================================
5043 * FUNCTION   : addOnlineReprocChannel
5044 *
 * DESCRIPTION: add an online reprocess channel that will do reprocess on frames
5046 *              coming from input channel
5047 *
5048 * PARAMETERS :
5049 *   @pInputChannel : ptr to input channel whose frames will be post-processed
5050 *
5051 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
5052 *==========================================================================*/
5053QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
5054              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
5055{
5056    int32_t rc = NO_ERROR;
5057    QCamera3ReprocessChannel *pChannel = NULL;
5058    if (pInputChannel == NULL) {
5059        ALOGE("%s: input channel obj is NULL", __func__);
5060        return NULL;
5061    }
5062
5063    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
5064            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
5065    if (NULL == pChannel) {
5066        ALOGE("%s: no mem for reprocess channel", __func__);
5067        return NULL;
5068    }
5069
5070    // Capture channel, only need snapshot and postview streams start together
5071    mm_camera_channel_attr_t attr;
5072    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
5073    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
5074    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
5075    rc = pChannel->initialize();
5076    if (rc != NO_ERROR) {
5077        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
5078        delete pChannel;
5079        return NULL;
5080    }
5081
5082    // pp feature config
5083    cam_pp_feature_config_t pp_config;
5084    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
5085    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
5086        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
5087        pp_config.sharpness = mJpegSettings->sharpness;
5088    }
5089
5090    if (isWNREnabled()) {
5091        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
5092        pp_config.denoise2d.denoise_enable = 1;
5093        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
5094    }
5095    if (needRotationReprocess()) {
5096        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
5097        int rotation = mJpegSettings->jpeg_orientation;
5098        if (rotation == 0) {
5099            pp_config.rotation = ROTATE_0;
5100        } else if (rotation == 90) {
5101            pp_config.rotation = ROTATE_90;
5102        } else if (rotation == 180) {
5103            pp_config.rotation = ROTATE_180;
5104        } else if (rotation == 270) {
5105            pp_config.rotation = ROTATE_270;
5106        }
5107    }
5108
5109   rc = pChannel->addReprocStreamsFromSource(pp_config,
5110                                             pInputChannel,
5111                                             mMetadataChannel);
5112
5113    if (rc != NO_ERROR) {
5114        delete pChannel;
5115        return NULL;
5116    }
5117    return pChannel;
5118}
5119
/*===========================================================================
 * FUNCTION   : getMaxUnmatchedFramesInQueue
 *
 * DESCRIPTION: query the max number of unmatched frames allowed to queue up,
 *              taken from the minimum post-processing buffer count reported
 *              in the camera capability table
 *
 * PARAMETERS : none
 *
 * RETURN     : max number of unmatched frames in queue
 *==========================================================================*/
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
5124
/*===========================================================================
 * FUNCTION   : isWNREnabled
 *
 * DESCRIPTION: query whether wavelet noise reduction (WNR) is supported,
 *              per the camera capability table
 *
 * PARAMETERS : none
 *
 * RETURN     : true if WNR is supported; false otherwise
 *==========================================================================*/
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
5128
5129}; //end namespace qcamera
5130