QCamera3HWI.cpp revision 88e2894271ddb0d10dcc220a05b71c4f5afaf1d1
/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34#include <cutils/properties.h>
35#include <hardware/camera3.h>
36#include <camera/CameraMetadata.h>
37#include <stdlib.h>
38#include <fcntl.h>
39#include <stdint.h>
40#include <utils/Log.h>
41#include <utils/Errors.h>
42#include <ui/Fence.h>
43#include <gralloc_priv.h>
44#include "QCamera3HWI.h"
45#include "QCamera3Mem.h"
46#include "QCamera3Channel.h"
47#include "QCamera3PostProc.h"
48
using namespace android;

namespace qcamera {

#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand for fetching the raw buffer pointer at INDEX from a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables, indexed by camera id (populated elsewhere,
// before a QCamera3HardwareInterface is constructed — the ctor dereferences it).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Previously applied parameter buffer; presumably used to diff settings — not set in this chunk.
parm_buffer_t *prevSettings;
// Cached static camera metadata, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: only one camera session may be open at a time
// (openCamera() returns -EUSERS otherwise).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
63
/* Translation tables between Android framework control enums (ANDROID_*)
 * and the backend HAL enums (CAM_*). */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    // FACE_PRIORITY has no backend scene-mode equivalent; mapped to OFF.
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    // Framework's STEADYPHOTO corresponds to the backend anti-shake mode.
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    // AF OFF is represented as a fixed-focus mode on the backend.
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

/* AE mode -> flash mode: both OFF and plain ON disable flash; the REDEYE
 * variant is treated the same as AUTO flash by the backend. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Note: SIMPLE face-detect mode is intentionally absent; only OFF/FULL map.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

/* Supported JPEG thumbnail sizes as flattened (width, height) pairs.
 * The leading 0x0 entry denotes "no thumbnail", as required by the
 * ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES static metadata contract. */
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
166
/* camera3_device_ops vtable handed to the framework through
 * mCameraDevice.ops.  Uses the GCC designated-initializer extension
 * ("label: value"); keep field order in sync with camera3.h. */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};

// Upper bound on capture requests that may be outstanding simultaneously.
int QCamera3HardwareInterface::kMaxInFlight = 5;
180
181/*===========================================================================
182 * FUNCTION   : QCamera3HardwareInterface
183 *
184 * DESCRIPTION: constructor of QCamera3HardwareInterface
185 *
186 * PARAMETERS :
187 *   @cameraId  : camera ID
188 *
189 * RETURN     : none
190 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mHdrHint(false),
      mMetaFrameCount(0)
{
    // Fill in the hw_device_t header the framework uses to drive this HAL.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE: assumes gCamCapability[cameraId] was populated before construction.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request-tracking synchronization primitives, destroyed in the dtor.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Best-effort: power module is optional, failure is only logged.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
237
238/*===========================================================================
239 * FUNCTION   : ~QCamera3HardwareInterface
240 *
241 * DESCRIPTION: destructor of QCamera3HardwareInterface
242 *
243 * PARAMETERS : none
244 *
245 * RETURN     : none
246 *==========================================================================*/
247QCamera3HardwareInterface::~QCamera3HardwareInterface()
248{
249    ALOGV("%s: E", __func__);
250    /* We need to stop all streams before deleting any stream */
251        /*flush the metadata list*/
252    if (!mStoredMetadataList.empty()) {
253        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
254              m != mStoredMetadataList.end(); ) {
255            mMetadataChannel->bufDone(m->meta_buf);
256            free(m->meta_buf);
257            m = mStoredMetadataList.erase(m);
258        }
259    }
260
261    // NOTE: 'camera3_stream_t *' objects are already freed at
262    //        this stage by the framework
263    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
264        it != mStreamInfo.end(); it++) {
265        QCamera3Channel *channel = (*it)->channel;
266        if (channel) {
267            channel->stop();
268        }
269    }
270
271    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
272        it != mStreamInfo.end(); it++) {
273        QCamera3Channel *channel = (*it)->channel;
274        if ((*it)->registered && (*it)->buffer_set.buffers) {
275             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
276        }
277        if (channel)
278            delete channel;
279        free (*it);
280    }
281
282    mPictureChannel = NULL;
283
284    if (mJpegSettings != NULL) {
285        free(mJpegSettings);
286        mJpegSettings = NULL;
287    }
288
289    /* Clean up all channels */
290    if (mCameraInitialized) {
291        if (mMetadataChannel) {
292            mMetadataChannel->stop();
293            delete mMetadataChannel;
294            mMetadataChannel = NULL;
295        }
296        deinitParameters();
297    }
298
299    if (mCameraOpened)
300        closeCamera();
301
302    mPendingBuffersMap.mPendingBufferList.clear();
303    mPendingRequestsList.clear();
304
305    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
306        if (mDefaultMetadata[i])
307            free_camera_metadata(mDefaultMetadata[i]);
308
309    pthread_cond_destroy(&mRequestCond);
310
311    pthread_mutex_destroy(&mMutex);
312    ALOGV("%s: X", __func__);
313}
314
315/*===========================================================================
316 * FUNCTION   : openCamera
317 *
318 * DESCRIPTION: open camera
319 *
320 * PARAMETERS :
321 *   @hw_device  : double ptr for camera device struct
322 *
323 * RETURN     : int32_t type of status
324 *              NO_ERROR  -- success
325 *              none-zero failure code
326 *==========================================================================*/
327int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
328{
329    int rc = 0;
330    pthread_mutex_lock(&mCameraSessionLock);
331    if (mCameraSessionActive) {
332        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
333        pthread_mutex_unlock(&mCameraSessionLock);
334        return -EUSERS;
335    }
336
337    if (mCameraOpened) {
338        *hw_device = NULL;
339        return PERMISSION_DENIED;
340    }
341
342    rc = openCamera();
343    if (rc == 0) {
344        *hw_device = &mCameraDevice.common;
345        mCameraSessionActive = 1;
346    } else
347        *hw_device = NULL;
348
349#ifdef HAS_MULTIMEDIA_HINTS
350    if (rc == 0) {
351        if (m_pPowerModule) {
352            if (m_pPowerModule->powerHint) {
353                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
354                        (void *)"state=1");
355            }
356        }
357    }
358#endif
359    pthread_mutex_unlock(&mCameraSessionLock);
360    return rc;
361}
362
363/*===========================================================================
364 * FUNCTION   : openCamera
365 *
366 * DESCRIPTION: open camera
367 *
368 * PARAMETERS : none
369 *
370 * RETURN     : int32_t type of status
371 *              NO_ERROR  -- success
372 *              none-zero failure code
373 *==========================================================================*/
374int QCamera3HardwareInterface::openCamera()
375{
376    if (mCameraHandle) {
377        ALOGE("Failure: Camera already opened");
378        return ALREADY_EXISTS;
379    }
380    mCameraHandle = camera_open(mCameraId);
381    if (!mCameraHandle) {
382        ALOGE("camera_open failed.");
383        return UNKNOWN_ERROR;
384    }
385
386    mCameraOpened = true;
387
388    return NO_ERROR;
389}
390
391/*===========================================================================
392 * FUNCTION   : closeCamera
393 *
394 * DESCRIPTION: close camera
395 *
396 * PARAMETERS : none
397 *
398 * RETURN     : int32_t type of status
399 *              NO_ERROR  -- success
400 *              none-zero failure code
401 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    int rc = NO_ERROR;

    // Release the backend handle; assumes mCameraHandle is valid (callers in
    // view only invoke this when mCameraOpened is true).
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;
    mCameraOpened = false;

#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == NO_ERROR) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                // Withdraw the HDR hint first if it is still outstanding...
                if(mHdrHint == true) {
                    m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                            (void *)"state=3");
                    mHdrHint = false;
                }
                // ...then signal that the encode session has ended.
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=0");
            }
        }
    }
#endif

    return rc;
}
428
429/*===========================================================================
430 * FUNCTION   : initialize
431 *
432 * DESCRIPTION: Initialize frameworks callback functions
433 *
434 * PARAMETERS :
435 *   @callback_ops : callback function to frameworks
436 *
437 * RETURN     :
438 *
439 *==========================================================================*/
440int QCamera3HardwareInterface::initialize(
441        const struct camera3_callback_ops *callback_ops)
442{
443    int rc;
444
445    pthread_mutex_lock(&mMutex);
446
447    rc = initParameters();
448    if (rc < 0) {
449        ALOGE("%s: initParamters failed %d", __func__, rc);
450       goto err1;
451    }
452    mCallbackOps = callback_ops;
453
454    pthread_mutex_unlock(&mMutex);
455    mCameraInitialized = true;
456    return 0;
457
458err1:
459    pthread_mutex_unlock(&mMutex);
460    return rc;
461}
462
463/*===========================================================================
464 * FUNCTION   : configureStreams
465 *
466 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
467 *              and output streams.
468 *
469 * PARAMETERS :
470 *   @stream_list : streams to be configured
471 *
472 * RETURN     :
473 *
474 *==========================================================================*/
475int QCamera3HardwareInterface::configureStreams(
476        camera3_stream_configuration_t *streamList)
477{
478    int rc = 0;
479    mIsZslMode = false;
480
481    // Sanity check stream_list
482    if (streamList == NULL) {
483        ALOGE("%s: NULL stream configuration", __func__);
484        return BAD_VALUE;
485    }
486    if (streamList->streams == NULL) {
487        ALOGE("%s: NULL stream list", __func__);
488        return BAD_VALUE;
489    }
490
491    if (streamList->num_streams < 1) {
492        ALOGE("%s: Bad number of streams requested: %d", __func__,
493                streamList->num_streams);
494        return BAD_VALUE;
495    }
496
497    /* first invalidate all the steams in the mStreamList
498     * if they appear again, they will be validated */
499    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
500            it != mStreamInfo.end(); it++) {
501        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
502        channel->stop();
503        (*it)->status = INVALID;
504    }
505    if (mMetadataChannel) {
506        /* If content of mStreamInfo is not 0, there is metadata stream */
507        mMetadataChannel->stop();
508    }
509
510#ifdef HAS_MULTIMEDIA_HINTS
511    if(mHdrHint == true) {
512        if (m_pPowerModule) {
513            if (m_pPowerModule->powerHint) {
514                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
515                        (void *)"state=3");
516                mHdrHint = false;
517            }
518        }
519    }
520#endif
521
522    pthread_mutex_lock(&mMutex);
523
524    camera3_stream_t *inputStream = NULL;
525    camera3_stream_t *jpegStream = NULL;
526    cam_stream_size_info_t stream_config_info;
527
528    for (size_t i = 0; i < streamList->num_streams; i++) {
529        camera3_stream_t *newStream = streamList->streams[i];
530        ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
531                __func__, newStream->stream_type, newStream->format,
532                 newStream->width, newStream->height);
533        //if the stream is in the mStreamList validate it
534        bool stream_exists = false;
535        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
536                it != mStreamInfo.end(); it++) {
537            if ((*it)->stream == newStream) {
538                QCamera3Channel *channel =
539                    (QCamera3Channel*)(*it)->stream->priv;
540                stream_exists = true;
541                (*it)->status = RECONFIGURE;
542                /*delete the channel object associated with the stream because
543                  we need to reconfigure*/
544                delete channel;
545                (*it)->stream->priv = NULL;
546                (*it)->channel = NULL;
547            }
548        }
549        if (!stream_exists) {
550            //new stream
551            stream_info_t* stream_info;
552            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
553            stream_info->stream = newStream;
554            stream_info->status = VALID;
555            stream_info->registered = 0;
556            stream_info->channel = NULL;
557            mStreamInfo.push_back(stream_info);
558        }
559        if (newStream->stream_type == CAMERA3_STREAM_INPUT
560                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
561            if (inputStream != NULL) {
562                ALOGE("%s: Multiple input streams requested!", __func__);
563                pthread_mutex_unlock(&mMutex);
564                return BAD_VALUE;
565            }
566            inputStream = newStream;
567        }
568        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
569            jpegStream = newStream;
570        }
571    }
572    mInputStream = inputStream;
573
574    /*clean up invalid streams*/
575    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
576            it != mStreamInfo.end();) {
577        if(((*it)->status) == INVALID){
578            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
579            delete channel;
580            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
581            free(*it);
582            it = mStreamInfo.erase(it);
583        } else {
584            it++;
585        }
586    }
587
588    if (mMetadataChannel) {
589        delete mMetadataChannel;
590        mMetadataChannel = NULL;
591    }
592
593    //Create metadata channel and initialize it
594    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
595                    mCameraHandle->ops, captureResultCb,
596                    &gCamCapability[mCameraId]->padding_info, this);
597    if (mMetadataChannel == NULL) {
598        ALOGE("%s: failed to allocate metadata channel", __func__);
599        rc = -ENOMEM;
600        pthread_mutex_unlock(&mMutex);
601        return rc;
602    }
603    rc = mMetadataChannel->initialize();
604    if (rc < 0) {
605        ALOGE("%s: metadata channel initialization failed", __func__);
606        delete mMetadataChannel;
607        mMetadataChannel = NULL;
608        pthread_mutex_unlock(&mMutex);
609        return rc;
610    }
611
612    /* Allocate channel objects for the requested streams */
613    for (size_t i = 0; i < streamList->num_streams; i++) {
614        camera3_stream_t *newStream = streamList->streams[i];
615        uint32_t stream_usage = newStream->usage;
616        stream_config_info.stream_sizes[i].width = newStream->width;
617        stream_config_info.stream_sizes[i].height = newStream->height;
618        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
619            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
620            //for zsl stream the size is jpeg size
621            stream_config_info.stream_sizes[i].width = jpegStream->width;
622            stream_config_info.stream_sizes[i].height = jpegStream->height;
623            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
624        } else {
625           //for non zsl streams find out the format
626           switch (newStream->format) {
627           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
628              {
629                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
630                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
631                 } else {
632                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
633                 }
634              }
635              break;
636           case HAL_PIXEL_FORMAT_YCbCr_420_888:
637              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
638#ifdef HAS_MULTIMEDIA_HINTS
639              if (m_pPowerModule) {
640                  if (m_pPowerModule->powerHint) {
641                      m_pPowerModule->powerHint(m_pPowerModule,
642                          POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
643                      mHdrHint = true;
644                  }
645              }
646#endif
647              break;
648           case HAL_PIXEL_FORMAT_BLOB:
649              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
650              break;
651           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
652           case HAL_PIXEL_FORMAT_RAW16:
653              stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
654              break;
655           default:
656              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
657              break;
658           }
659        }
660        if (newStream->priv == NULL) {
661            //New stream, construct channel
662            switch (newStream->stream_type) {
663            case CAMERA3_STREAM_INPUT:
664                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
665                break;
666            case CAMERA3_STREAM_BIDIRECTIONAL:
667                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
668                    GRALLOC_USAGE_HW_CAMERA_WRITE;
669                break;
670            case CAMERA3_STREAM_OUTPUT:
671                /* For video encoding stream, set read/write rarely
672                 * flag so that they may be set to un-cached */
673                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
674                    newStream->usage =
675                         (GRALLOC_USAGE_SW_READ_RARELY |
676                         GRALLOC_USAGE_SW_WRITE_RARELY |
677                         GRALLOC_USAGE_HW_CAMERA_WRITE);
678                else
679                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
680                break;
681            default:
682                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
683                break;
684            }
685
686            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
687                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
688                QCamera3Channel *channel;
689                switch (newStream->format) {
690                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
691                case HAL_PIXEL_FORMAT_YCbCr_420_888:
692                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
693                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
694                        jpegStream)
695                        mIsZslMode = true;
696                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
697                            mCameraHandle->ops, captureResultCb,
698                            &gCamCapability[mCameraId]->padding_info, this, newStream);
699                    if (channel == NULL) {
700                        ALOGE("%s: allocation of channel failed", __func__);
701                        pthread_mutex_unlock(&mMutex);
702                        return -ENOMEM;
703                    }
704
705                    newStream->priv = channel;
706                    break;
707                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
708                case HAL_PIXEL_FORMAT_RAW16:
709                    newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
710                    mRawChannel = new QCamera3RawChannel(
711                            mCameraHandle->camera_handle,
712                            mCameraHandle->ops, captureResultCb,
713                            &gCamCapability[mCameraId]->padding_info,
714                            this, newStream, (newStream->format == HAL_PIXEL_FORMAT_RAW16));
715                    if (mRawChannel == NULL) {
716                        ALOGE("%s: allocation of raw channel failed", __func__);
717                        pthread_mutex_unlock(&mMutex);
718                        return -ENOMEM;
719                    }
720
721                    newStream->priv = (QCamera3Channel*)mRawChannel;
722                    break;
723                case HAL_PIXEL_FORMAT_BLOB:
724                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
725                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
726                            mCameraHandle->ops, captureResultCb,
727                            &gCamCapability[mCameraId]->padding_info, this, newStream);
728                    if (mPictureChannel == NULL) {
729                        ALOGE("%s: allocation of channel failed", __func__);
730                        pthread_mutex_unlock(&mMutex);
731                        return -ENOMEM;
732                    }
733                    newStream->priv = (QCamera3Channel*)mPictureChannel;
734                    break;
735
736                //TODO: Add support for app consumed format?
737                default:
738                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
739                    break;
740                }
741            }
742
743            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
744                    it != mStreamInfo.end(); it++) {
745                if ((*it)->stream == newStream) {
746                    (*it)->channel = (QCamera3Channel*) newStream->priv;
747                    break;
748                }
749            }
750        } else {
751            // Channel already exists for this stream
752            // Do nothing for now
753        }
754    }
755
756    int32_t hal_version = CAM_HAL_V3;
757    stream_config_info.num_streams = streamList->num_streams;
758
759    // settings/parameters don't carry over for new configureStreams
760    memset(mParameters, 0, sizeof(parm_buffer_t));
761
762    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
763    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
764                sizeof(hal_version), &hal_version);
765
766    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
767                sizeof(stream_config_info), &stream_config_info);
768
769    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
770
    /*For the streams to be reconfigured we need to register the buffers
      since the framework won't*/
773    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
774            it != mStreamInfo.end(); it++) {
775        if ((*it)->status == RECONFIGURE) {
776            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
777            /*only register buffers for streams that have already been
778              registered*/
779            if ((*it)->registered) {
780                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
781                        (*it)->buffer_set.buffers);
782                if (rc != NO_ERROR) {
783                    ALOGE("%s: Failed to register the buffers of old stream,\
784                            rc = %d", __func__, rc);
785                }
786                ALOGV("%s: channel %p has %d buffers",
787                        __func__, channel, (*it)->buffer_set.num_buffers);
788            }
789        }
790    }
791
    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
793    mPendingRequestsList.clear();
794    mPendingFrameDropList.clear();
795    // Initialize/Reset the pending buffers list
796    mPendingBuffersMap.num_buffers = 0;
797    mPendingBuffersMap.mPendingBufferList.clear();
798
799    /*flush the metadata list*/
800    if (!mStoredMetadataList.empty()) {
801        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
802              m != mStoredMetadataList.end(); m++) {
803            mMetadataChannel->bufDone(m->meta_buf);
804            free(m->meta_buf);
805            m = mStoredMetadataList.erase(m);
806        }
807    }
808
809    mFirstRequest = true;
810
811    //Get min frame duration for this streams configuration
812    deriveMinFrameDuration();
813
814    pthread_mutex_unlock(&mMutex);
815    return rc;
816}
817
818/*===========================================================================
819 * FUNCTION   : validateCaptureRequest
820 *
821 * DESCRIPTION: validate a capture request from camera service
822 *
823 * PARAMETERS :
824 *   @request : request from framework to process
825 *
826 * RETURN     :
827 *
828 *==========================================================================*/
829int QCamera3HardwareInterface::validateCaptureRequest(
830                    camera3_capture_request_t *request)
831{
832    ssize_t idx = 0;
833    const camera3_stream_buffer_t *b;
834    CameraMetadata meta;
835
836    /* Sanity check the request */
837    if (request == NULL) {
838        ALOGE("%s: NULL capture request", __func__);
839        return BAD_VALUE;
840    }
841
842    uint32_t frameNumber = request->frame_number;
843    if (request->input_buffer != NULL &&
844            request->input_buffer->stream != mInputStream) {
845        ALOGE("%s: Request %d: Input buffer not from input stream!",
846                __FUNCTION__, frameNumber);
847        return BAD_VALUE;
848    }
849    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
850        ALOGE("%s: Request %d: No output buffers provided!",
851                __FUNCTION__, frameNumber);
852        return BAD_VALUE;
853    }
854    if (request->input_buffer != NULL) {
855        b = request->input_buffer;
856        QCamera3Channel *channel =
857            static_cast<QCamera3Channel*>(b->stream->priv);
858        if (channel == NULL) {
859            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
860                    __func__, frameNumber, idx);
861            return BAD_VALUE;
862        }
863        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
864            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
865                    __func__, frameNumber, idx);
866            return BAD_VALUE;
867        }
868        if (b->release_fence != -1) {
869            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
870                    __func__, frameNumber, idx);
871            return BAD_VALUE;
872        }
873        if (b->buffer == NULL) {
874            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
875                    __func__, frameNumber, idx);
876            return BAD_VALUE;
877        }
878    }
879
880    // Validate all buffers
881    b = request->output_buffers;
882    do {
883        QCamera3Channel *channel =
884                static_cast<QCamera3Channel*>(b->stream->priv);
885        if (channel == NULL) {
886            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
887                    __func__, frameNumber, idx);
888            return BAD_VALUE;
889        }
890        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
891            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
892                    __func__, frameNumber, idx);
893            return BAD_VALUE;
894        }
895        if (b->release_fence != -1) {
896            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
897                    __func__, frameNumber, idx);
898            return BAD_VALUE;
899        }
900        if (b->buffer == NULL) {
901            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
902                    __func__, frameNumber, idx);
903            return BAD_VALUE;
904        }
905        idx++;
906        b = request->output_buffers + idx;
907    } while (idx < (ssize_t)request->num_output_buffers);
908
909    return NO_ERROR;
910}
911
912/*===========================================================================
913 * FUNCTION   : deriveMinFrameDuration
914 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
916 *              on currently configured streams.
917 *
918 * PARAMETERS : NONE
919 *
920 * RETURN     : NONE
921 *
922 *==========================================================================*/
923void QCamera3HardwareInterface::deriveMinFrameDuration()
924{
925    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
926
927    maxJpegDim = 0;
928    maxProcessedDim = 0;
929    maxRawDim = 0;
930
931    // Figure out maximum jpeg, processed, and raw dimensions
932    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
933        it != mStreamInfo.end(); it++) {
934
935        // Input stream doesn't have valid stream_type
936        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
937            continue;
938
939        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
940        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
941            if (dimension > maxJpegDim)
942                maxJpegDim = dimension;
943        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
944                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
945            if (dimension > maxRawDim)
946                maxRawDim = dimension;
947        } else {
948            if (dimension > maxProcessedDim)
949                maxProcessedDim = dimension;
950        }
951    }
952
953    //Assume all jpeg dimensions are in processed dimensions.
954    if (maxJpegDim > maxProcessedDim)
955        maxProcessedDim = maxJpegDim;
956    //Find the smallest raw dimension that is greater or equal to jpeg dimension
957    if (maxProcessedDim > maxRawDim) {
958        maxRawDim = INT32_MAX;
959        for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
960            i++) {
961
962            int32_t dimension =
963                gCamCapability[mCameraId]->raw_dim[i].width *
964                gCamCapability[mCameraId]->raw_dim[i].height;
965
966            if (dimension >= maxProcessedDim && dimension < maxRawDim)
967                maxRawDim = dimension;
968        }
969    }
970
971    //Find minimum durations for processed, jpeg, and raw
972    for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
973            i++) {
974        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
975                gCamCapability[mCameraId]->raw_dim[i].height) {
976            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
977            break;
978        }
979    }
980    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
981        if (maxProcessedDim ==
982            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
983            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
984            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
985            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
986            break;
987        }
988    }
989}
990
991/*===========================================================================
992 * FUNCTION   : getMinFrameDuration
993 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
999 * RETURN     : min farme duration for a particular request
1000 *
1001 *==========================================================================*/
1002int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
1003{
1004    bool hasJpegStream = false;
1005    bool hasRawStream = false;
1006    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
1007        const camera3_stream_t *stream = request->output_buffers[i].stream;
1008        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
1009            hasJpegStream = true;
1010        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1011                stream->format == HAL_PIXEL_FORMAT_RAW16)
1012            hasRawStream = true;
1013    }
1014
1015    if (!hasJpegStream)
1016        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
1017    else
1018        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
1019}
1020
1021/*===========================================================================
1022 * FUNCTION   : handleMetadataWithLock
1023 *
1024 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
1025 *
1026 * PARAMETERS : @metadata_buf: metadata buffer
1027 *
1028 * RETURN     :
1029 *
1030 *==========================================================================*/
1031void QCamera3HardwareInterface::handleMetadataWithLock(
1032    mm_camera_super_buf_t *metadata_buf)
1033{
1034    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1035    int32_t frame_number_valid = *(int32_t *)
1036        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1037    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1038        CAM_INTF_META_PENDING_REQUESTS, metadata);
1039    uint32_t frame_number = *(uint32_t *)
1040        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1041    const struct timeval *tv = (const struct timeval *)
1042        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1043    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1044        tv->tv_usec * NSEC_PER_USEC;
1045    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
1046        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
1047
1048    int32_t urgent_frame_number_valid = *(int32_t *)
1049        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
1050    uint32_t urgent_frame_number = *(uint32_t *)
1051        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
1052
1053    if (urgent_frame_number_valid) {
1054        ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
1055          __func__, urgent_frame_number, capture_time);
1056
1057        //Recieved an urgent Frame Number, handle it
1058        //using HAL3.1 quirk for partial results
1059        for (List<PendingRequestInfo>::iterator i =
1060            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
1061            camera3_notify_msg_t notify_msg;
1062            ALOGV("%s: Iterator Frame = %d urgent frame = %d",
1063                __func__, i->frame_number, urgent_frame_number);
1064
1065            if (i->frame_number < urgent_frame_number &&
1066                i->bNotified == 0) {
1067                notify_msg.type = CAMERA3_MSG_SHUTTER;
1068                notify_msg.message.shutter.frame_number = i->frame_number;
1069                notify_msg.message.shutter.timestamp = capture_time -
1070                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
1071                mCallbackOps->notify(mCallbackOps, &notify_msg);
1072                i->timestamp = notify_msg.message.shutter.timestamp;
1073                i->bNotified = 1;
1074                ALOGV("%s: Dummy notification !!!! notify frame_number = %d, capture_time = %lld",
1075                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
1076            }
1077
1078            if (i->frame_number == urgent_frame_number) {
1079
1080                camera3_capture_result_t result;
1081
1082                // Send shutter notify to frameworks
1083                notify_msg.type = CAMERA3_MSG_SHUTTER;
1084                notify_msg.message.shutter.frame_number = i->frame_number;
1085                notify_msg.message.shutter.timestamp = capture_time;
1086                mCallbackOps->notify(mCallbackOps, &notify_msg);
1087
1088                i->timestamp = capture_time;
1089                i->bNotified = 1;
1090
1091                // Extract 3A metadata
1092                result.result =
1093                    translateCbUrgentMetadataToResultMetadata(metadata);
1094                // Populate metadata result
1095                result.frame_number = urgent_frame_number;
1096                result.num_output_buffers = 0;
1097                result.output_buffers = NULL;
1098                mCallbackOps->process_capture_result(mCallbackOps, &result);
1099                ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
1100                     __func__, result.frame_number, capture_time);
1101                free_camera_metadata((camera_metadata_t *)result.result);
1102                break;
1103            }
1104        }
1105    }
1106
1107    if (!frame_number_valid) {
1108        ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
1109        mMetadataChannel->bufDone(metadata_buf);
1110        free(metadata_buf);
1111        goto done_metadata;
1112    }
1113    ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
1114            frame_number, capture_time);
1115
1116    // Go through the pending requests info and send shutter/results to frameworks
1117    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1118        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1119        camera3_capture_result_t result;
1120        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1121
1122        // Flush out all entries with less or equal frame numbers.
1123        mPendingRequest--;
1124
1125        // Check whether any stream buffer corresponding to this is dropped or not
1126        // If dropped, then notify ERROR_BUFFER for the corresponding stream and
1127        // buffer with CAMERA3_BUFFER_STATUS_ERROR
1128        if (cam_frame_drop.frame_dropped) {
1129            camera3_notify_msg_t notify_msg;
1130            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1131                    j != i->buffers.end(); j++) {
1132                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1133                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1134                for (uint32_t k=0; k<cam_frame_drop.cam_stream_ID.num_streams; k++) {
1135                  if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
1136                      // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
1137                      ALOGV("%s: Start of reporting error frame#=%d, streamID=%d",
1138                             __func__, i->frame_number, streamID);
1139                      notify_msg.type = CAMERA3_MSG_ERROR;
1140                      notify_msg.message.error.frame_number = i->frame_number;
1141                      notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
1142                      notify_msg.message.error.error_stream = j->stream;
1143                      mCallbackOps->notify(mCallbackOps, &notify_msg);
1144                      ALOGV("%s: End of reporting error frame#=%d, streamID=%d",
1145                             __func__, i->frame_number, streamID);
1146                      PendingFrameDropInfo PendingFrameDrop;
1147                      PendingFrameDrop.frame_number=i->frame_number;
1148                      PendingFrameDrop.stream_ID = streamID;
1149                      // Add the Frame drop info to mPendingFrameDropList
1150                      mPendingFrameDropList.push_back(PendingFrameDrop);
1151                  }
1152                }
1153            }
1154        }
1155
1156        // Send empty metadata with already filled buffers for dropped metadata
1157        // and send valid metadata with already filled buffers for current metadata
1158        if (i->frame_number < frame_number) {
1159            CameraMetadata dummyMetadata;
1160            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1161                    &i->timestamp, 1);
1162            dummyMetadata.update(ANDROID_REQUEST_ID,
1163                    &(i->request_id), 1);
1164            result.result = dummyMetadata.release();
1165        } else {
1166            result.result = translateCbMetadataToResultMetadata(metadata,
1167                    i->timestamp, i->request_id, i->blob_request,
1168                    &(i->input_jpeg_settings), metadata_buf->bufs[0]->frame_idx);
1169            if (mIsZslMode) {
1170                int found_metadata = 0;
1171                //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1172                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1173                    j != i->buffers.end(); j++) {
1174                    if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1175                        //check if corresp. zsl already exists in the stored metadata list
1176                        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1177                                m != mStoredMetadataList.begin(); m++) {
1178                            if (m->frame_number == frame_number) {
1179                                m->meta_buf = metadata_buf;
1180                                found_metadata = 1;
1181                                break;
1182                            }
1183                        }
1184                        if (!found_metadata) {
1185                            MetadataBufferInfo store_meta_info;
1186                            store_meta_info.meta_buf = metadata_buf;
1187                            store_meta_info.frame_number = frame_number;
1188                            mStoredMetadataList.push_back(store_meta_info);
1189                            found_metadata = 1;
1190                        }
1191                    }
1192                }
1193                if (!found_metadata) {
1194                    if (!i->input_buffer_present && i->blob_request) {
1195                        //livesnapshot or fallback non-zsl snapshot case
1196                        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1197                                j != i->buffers.end(); j++){
1198                            if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1199                                j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1200                                mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1201                                break;
1202                            }
1203                        }
1204                    } else {
1205                        //return the metadata immediately
1206                        mMetadataChannel->bufDone(metadata_buf);
1207                        free(metadata_buf);
1208                    }
1209                }
1210            } else if (!mIsZslMode && i->blob_request) {
1211                //If it is a blob request then send the metadata to the picture channel
1212                mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1213            } else {
1214                // Return metadata buffer
1215                mMetadataChannel->bufDone(metadata_buf);
1216                free(metadata_buf);
1217            }
1218        }
1219        if (!result.result) {
1220            ALOGE("%s: metadata is NULL", __func__);
1221        }
1222        result.frame_number = i->frame_number;
1223        result.num_output_buffers = 0;
1224        result.output_buffers = NULL;
1225        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1226                    j != i->buffers.end(); j++) {
1227            if (j->buffer) {
1228                result.num_output_buffers++;
1229            }
1230        }
1231
1232        if (result.num_output_buffers > 0) {
1233            camera3_stream_buffer_t *result_buffers =
1234                new camera3_stream_buffer_t[result.num_output_buffers];
1235            if (!result_buffers) {
1236                ALOGE("%s: Fatal error: out of memory", __func__);
1237            }
1238            size_t result_buffers_idx = 0;
1239            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1240                    j != i->buffers.end(); j++) {
1241                if (j->buffer) {
1242                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1243                            m != mPendingFrameDropList.end(); m++) {
1244                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
1245                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1246                        if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
1247                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1248                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
1249                                  __func__, frame_number, streamID);
1250                            m = mPendingFrameDropList.erase(m);
1251                            break;
1252                        }
1253                    }
1254
1255                    for (List<PendingBufferInfo>::iterator k =
1256                      mPendingBuffersMap.mPendingBufferList.begin();
1257                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
1258                      if (k->buffer == j->buffer->buffer) {
1259                        ALOGV("%s: Found buffer %p in pending buffer List "
1260                              "for frame %d, Take it out!!", __func__,
1261                               k->buffer, k->frame_number);
1262                        mPendingBuffersMap.num_buffers--;
1263                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
1264                        break;
1265                      }
1266                    }
1267
1268                    result_buffers[result_buffers_idx++] = *(j->buffer);
1269                    free(j->buffer);
1270                    j->buffer = NULL;
1271                }
1272            }
1273            result.output_buffers = result_buffers;
1274
1275            mCallbackOps->process_capture_result(mCallbackOps, &result);
1276            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1277                    __func__, result.frame_number, i->timestamp);
1278            free_camera_metadata((camera_metadata_t *)result.result);
1279            delete[] result_buffers;
1280        } else {
1281            mCallbackOps->process_capture_result(mCallbackOps, &result);
1282            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1283                        __func__, result.frame_number, i->timestamp);
1284            free_camera_metadata((camera_metadata_t *)result.result);
1285        }
1286        // erase the element from the list
1287        i = mPendingRequestsList.erase(i);
1288    }
1289
1290done_metadata:
1291    if (!pending_requests)
1292        unblockRequestIfNecessary();
1293
1294}
1295
1296/*===========================================================================
1297 * FUNCTION   : handleBufferWithLock
1298 *
1299 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1300 *
1301 * PARAMETERS : @buffer: image buffer for the callback
1302 *              @frame_number: frame number of the image buffer
1303 *
1304 * RETURN     :
1305 *
1306 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // The matching request is gone (its metadata was already delivered).
        // Verify all pending requests frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                // An older request still pending indicates out-of-order
                // delivery; log but continue.
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        // Build a buffer-only result (no metadata attached).
        camera3_capture_result_t result;
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // If this frame/stream was recorded as dropped, flag the buffer with
        // STATUS_ERROR and retire the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        ALOGV("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Remove this buffer from the pending-buffers bookkeeping.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                ALOGV("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        // For ZSL (bidirectional) streams, associate the buffer handle with
        // its stored metadata entry, creating the entry if none exists yet.
        if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
            int found = 0;
            for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
                k != mStoredMetadataList.end(); k++) {
                if (k->frame_number == frame_number) {
                    k->zsl_buf_hdl = buffer->buffer;
                    found = 1;
                    break;
                }
            }
            if (!found) {
                MetadataBufferInfo meta_info;
                meta_info.frame_number = frame_number;
                meta_info.zsl_buf_hdl = buffer->buffer;
                mStoredMetadataList.push_back(meta_info);
            }
        }
        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        // Request is still pending: cache a copy of the buffer so it can be
        // sent together with the metadata in handleMetadataWithLock().
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
            if (j->stream == buffer->stream) {
                if (j->buffer != NULL) {
                    ALOGE("%s: Error: buffer is already set", __func__);
                } else {
                    // Copy freed in handleMetadataWithLock() after delivery.
                    j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                    *(j->buffer) = *buffer;
                    ALOGV("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                }
            }
        }
    }
}
1396
1397/*===========================================================================
1398 * FUNCTION   : unblockRequestIfNecessary
1399 *
1400 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1401 *              that mMutex is held when this function is called.
1402 *
1403 * PARAMETERS :
1404 *
1405 * RETURN     :
1406 *
1407 *==========================================================================*/
1408void QCamera3HardwareInterface::unblockRequestIfNecessary()
1409{
1410    bool max_buffers_dequeued = false;
1411
1412    uint32_t queued_buffers = 0;
1413    for(List<stream_info_t*>::iterator it=mStreamInfo.begin();
1414        it != mStreamInfo.end(); it++) {
1415        queued_buffers = 0;
1416        for (List<PendingBufferInfo>::iterator k =
1417            mPendingBuffersMap.mPendingBufferList.begin();
1418            k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
1419            if (k->stream == (*it)->stream)
1420                queued_buffers++;
1421
1422            ALOGV("%s: Dequeued %d buffers for stream %p", __func__,
1423                queued_buffers, (*it)->stream);
1424            if (queued_buffers >=(* it)->stream->max_buffers) {
1425                ALOGV("%s: Wait!!! Max buffers Dequed", __func__);
1426                max_buffers_dequeued = true;
1427                break;
1428            }
1429        }
1430    }
1431
1432    if (!max_buffers_dequeued) {
1433        // Unblock process_capture_request
1434        pthread_cond_signal(&mRequestCond);
1435    }
1436}
1437
1438/*===========================================================================
1439 * FUNCTION   : registerStreamBuffers
1440 *
1441 * DESCRIPTION: Register buffers for a given stream with the HAL device.
1442 *
1443 * PARAMETERS :
1444 *   @stream_list : streams to be configured
1445 *
1446 * RETURN     :
1447 *
1448 *==========================================================================*/
1449int QCamera3HardwareInterface::registerStreamBuffers(
1450        const camera3_stream_buffer_set_t *buffer_set)
1451{
1452    int rc = 0;
1453
1454    pthread_mutex_lock(&mMutex);
1455
1456    if (buffer_set == NULL) {
1457        ALOGE("%s: Invalid buffer_set parameter.", __func__);
1458        pthread_mutex_unlock(&mMutex);
1459        return -EINVAL;
1460    }
1461    if (buffer_set->stream == NULL) {
1462        ALOGE("%s: Invalid stream parameter.", __func__);
1463        pthread_mutex_unlock(&mMutex);
1464        return -EINVAL;
1465    }
1466    if (buffer_set->num_buffers < 1) {
1467        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
1468        pthread_mutex_unlock(&mMutex);
1469        return -EINVAL;
1470    }
1471    if (buffer_set->buffers == NULL) {
1472        ALOGE("%s: Invalid buffers parameter.", __func__);
1473        pthread_mutex_unlock(&mMutex);
1474        return -EINVAL;
1475    }
1476
1477    camera3_stream_t *stream = buffer_set->stream;
1478    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
1479
1480    //set the buffer_set in the mStreamInfo array
1481    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1482            it != mStreamInfo.end(); it++) {
1483        if ((*it)->stream == stream) {
1484            uint32_t numBuffers = buffer_set->num_buffers;
1485            (*it)->buffer_set.stream = buffer_set->stream;
1486            (*it)->buffer_set.num_buffers = numBuffers;
1487            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
1488            if ((*it)->buffer_set.buffers == NULL) {
1489                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
1490                pthread_mutex_unlock(&mMutex);
1491                return -ENOMEM;
1492            }
1493            for (size_t j = 0; j < numBuffers; j++){
1494                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
1495            }
1496            (*it)->registered = 1;
1497        }
1498    }
1499    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
1500    if (rc < 0) {
1501        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
1502        pthread_mutex_unlock(&mMutex);
1503        return -ENODEV;
1504    }
1505
1506    pthread_mutex_unlock(&mMutex);
1507    return NO_ERROR;
1508}
1509
1510/*===========================================================================
1511 * FUNCTION   : processCaptureRequest
1512 *
1513 * DESCRIPTION: process a capture request from camera service
1514 *
1515 * PARAMETERS :
1516 *   @request : request from framework to process
1517 *
1518 * RETURN     :
1519 *
1520 *==========================================================================*/
1521int QCamera3HardwareInterface::processCaptureRequest(
1522                    camera3_capture_request_t *request)
1523{
1524    int rc = NO_ERROR;
1525    int32_t request_id;
1526    CameraMetadata meta;
1527    MetadataBufferInfo reproc_meta;
1528    int queueMetadata = 0;
1529
1530    pthread_mutex_lock(&mMutex);
1531
1532    rc = validateCaptureRequest(request);
1533    if (rc != NO_ERROR) {
1534        ALOGE("%s: incoming request is not valid", __func__);
1535        pthread_mutex_unlock(&mMutex);
1536        return rc;
1537    }
1538
1539    meta = request->settings;
1540
1541    // For first capture request, send capture intent, and
1542    // stream on all streams
1543    if (mFirstRequest) {
1544
1545        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1546            int32_t hal_version = CAM_HAL_V3;
1547            uint8_t captureIntent =
1548                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1549
1550            memset(mParameters, 0, sizeof(parm_buffer_t));
1551            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1552            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1553                sizeof(hal_version), &hal_version);
1554            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1555                sizeof(captureIntent), &captureIntent);
1556            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1557                mParameters);
1558        }
1559
1560        ALOGD("%s: Start META Channel", __func__);
1561        mMetadataChannel->start();
1562
1563        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1564            it != mStreamInfo.end(); it++) {
1565            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1566            ALOGD("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
1567            channel->start();
1568        }
1569    }
1570
1571    uint32_t frameNumber = request->frame_number;
1572    cam_stream_ID_t streamID;
1573
1574    if (meta.exists(ANDROID_REQUEST_ID)) {
1575        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1576        mCurrentRequestId = request_id;
1577        ALOGV("%s: Received request with id: %d",__func__, request_id);
1578    } else if (mFirstRequest || mCurrentRequestId == -1){
1579        ALOGE("%s: Unable to find request id field, \
1580                & no previous id available", __func__);
1581        return NAME_NOT_FOUND;
1582    } else {
1583        ALOGV("%s: Re-using old request id", __func__);
1584        request_id = mCurrentRequestId;
1585    }
1586
1587    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1588                                    __func__, __LINE__,
1589                                    request->num_output_buffers,
1590                                    request->input_buffer,
1591                                    frameNumber);
1592    // Acquire all request buffers first
1593    streamID.num_streams = 0;
1594    int blob_request = 0;
1595    for (size_t i = 0; i < request->num_output_buffers; i++) {
1596        const camera3_stream_buffer_t& output = request->output_buffers[i];
1597        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1598        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1599
1600        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1601            //Call function to store local copy of jpeg data for encode params.
1602            blob_request = 1;
1603            rc = getJpegSettings(request->settings);
1604            if (rc < 0) {
1605                ALOGE("%s: failed to get jpeg parameters", __func__);
1606                pthread_mutex_unlock(&mMutex);
1607                return rc;
1608            }
1609        }
1610
1611        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1612        if (rc != OK) {
1613            ALOGE("%s: fence wait failed %d", __func__, rc);
1614            pthread_mutex_unlock(&mMutex);
1615            return rc;
1616        }
1617
1618        streamID.streamID[streamID.num_streams] =
1619            channel->getStreamID(channel->getStreamTypeMask());
1620        streamID.num_streams++;
1621    }
1622
1623    rc = setFrameParameters(request, streamID);
1624    if (rc < 0) {
1625        ALOGE("%s: fail to set frame parameters", __func__);
1626        pthread_mutex_unlock(&mMutex);
1627        return rc;
1628    }
1629
1630    /* Update pending request list and pending buffers map */
1631    PendingRequestInfo pendingRequest;
1632    pendingRequest.frame_number = frameNumber;
1633    pendingRequest.num_buffers = request->num_output_buffers;
1634    pendingRequest.request_id = request_id;
1635    pendingRequest.blob_request = blob_request;
1636    pendingRequest.bNotified = 0;
1637    if (blob_request)
1638        pendingRequest.input_jpeg_settings = *mJpegSettings;
1639    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1640
1641    for (size_t i = 0; i < request->num_output_buffers; i++) {
1642        RequestedBufferInfo requestedBuf;
1643        requestedBuf.stream = request->output_buffers[i].stream;
1644        requestedBuf.buffer = NULL;
1645        pendingRequest.buffers.push_back(requestedBuf);
1646
1647        // Add to buffer handle the pending buffers list
1648        PendingBufferInfo bufferInfo;
1649        bufferInfo.frame_number = frameNumber;
1650        bufferInfo.buffer = request->output_buffers[i].buffer;
1651        bufferInfo.stream = request->output_buffers[i].stream;
1652        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
1653        mPendingBuffersMap.num_buffers++;
1654        ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1655          __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
1656          bufferInfo.stream->format);
1657    }
1658    ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1659          __func__, mPendingBuffersMap.num_buffers);
1660    mPendingRequestsList.push_back(pendingRequest);
1661
1662    // Notify metadata channel we receive a request
1663    mMetadataChannel->request(NULL, frameNumber);
1664
1665    // Call request on other streams
1666    for (size_t i = 0; i < request->num_output_buffers; i++) {
1667        const camera3_stream_buffer_t& output = request->output_buffers[i];
1668        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1669        mm_camera_buf_def_t *pInputBuffer = NULL;
1670
1671        if (channel == NULL) {
1672            ALOGE("%s: invalid channel pointer for stream", __func__);
1673            continue;
1674        }
1675
1676        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1677            QCamera3RegularChannel* inputChannel = NULL;
1678            if(request->input_buffer != NULL){
1679                //Try to get the internal format
1680                inputChannel = (QCamera3RegularChannel*)
1681                    request->input_buffer->stream->priv;
1682                if(inputChannel == NULL ){
1683                    ALOGE("%s: failed to get input channel handle", __func__);
1684                } else {
1685                    pInputBuffer =
1686                        inputChannel->getInternalFormatBuffer(
1687                                request->input_buffer->buffer);
1688                    ALOGD("%s: Input buffer dump",__func__);
1689                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1690                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1691                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1692                    ALOGD("Handle:%p", request->input_buffer->buffer);
1693                    //TODO: need to get corresponding metadata and send it to pproc
1694                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1695                         m != mStoredMetadataList.end(); m++) {
1696                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1697                            reproc_meta.meta_buf = m->meta_buf;
1698                            queueMetadata = 1;
1699                            break;
1700                        }
1701                    }
1702                }
1703            }
1704            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1705                            pInputBuffer,(QCamera3Channel*)inputChannel);
1706            if (queueMetadata) {
1707                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1708            }
1709        } else {
1710            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1711                __LINE__, output.buffer, frameNumber);
1712            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1713                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1714                     m != mStoredMetadataList.end(); m++) {
1715                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1716                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1717                            mMetadataChannel->bufDone(m->meta_buf);
1718                            free(m->meta_buf);
1719                            m = mStoredMetadataList.erase(m);
1720                            break;
1721                        }
1722                   }
1723                }
1724            }
1725            rc = channel->request(output.buffer, frameNumber);
1726        }
1727        if (rc < 0)
1728            ALOGE("%s: request failed", __func__);
1729    }
1730
1731    mFirstRequest = false;
1732    // Added a timed condition wait
1733    struct timespec ts;
1734    uint8_t isValidTimeout = 1;
1735    rc = clock_gettime(CLOCK_REALTIME, &ts);
1736    if (rc < 0) {
1737        isValidTimeout = 0;
1738        ALOGE("%s: Error reading the real time clock!!", __func__);
1739    }
1740    else {
1741        // Make timeout as 5 sec for request to be honored
1742        ts.tv_sec += 5;
1743    }
1744    //Block on conditional variable
1745    mPendingRequest++;
1746    do {
1747        if (!isValidTimeout) {
1748            ALOGV("%s: Blocking on conditional wait", __func__);
1749            pthread_cond_wait(&mRequestCond, &mMutex);
1750        }
1751        else {
1752            ALOGV("%s: Blocking on timed conditional wait", __func__);
1753            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1754            if (rc == ETIMEDOUT) {
1755                rc = -ENODEV;
1756                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1757                break;
1758            }
1759        }
1760        ALOGV("%s: Unblocked", __func__);
1761    }while (mPendingRequest >= kMaxInFlight);
1762
1763    pthread_mutex_unlock(&mMutex);
1764
1765    return rc;
1766}
1767
1768/*===========================================================================
1769 * FUNCTION   : getMetadataVendorTagOps
1770 *
1771 * DESCRIPTION:
1772 *
1773 * PARAMETERS :
1774 *
1775 *
1776 * RETURN     :
1777 *==========================================================================*/
1778void QCamera3HardwareInterface::getMetadataVendorTagOps(
1779                    vendor_tag_query_ops_t* /*ops*/)
1780{
1781    /* Enable locks when we eventually add Vendor Tags */
1782    /*
1783    pthread_mutex_lock(&mMutex);
1784
1785    pthread_mutex_unlock(&mMutex);
1786    */
1787    return;
1788}
1789
1790/*===========================================================================
1791 * FUNCTION   : dump
1792 *
1793 * DESCRIPTION:
1794 *
1795 * PARAMETERS :
1796 *
1797 *
1798 * RETURN     :
1799 *==========================================================================*/
1800void QCamera3HardwareInterface::dump(int /*fd*/)
1801{
1802    /*Enable lock when we implement this function*/
1803    /*
1804    pthread_mutex_lock(&mMutex);
1805
1806    pthread_mutex_unlock(&mMutex);
1807    */
1808    return;
1809}
1810
1811/*===========================================================================
1812 * FUNCTION   : flush
1813 *
1814 * DESCRIPTION:
1815 *
1816 * PARAMETERS :
1817 *
1818 *
1819 * RETURN     :
1820 *==========================================================================*/
1821int QCamera3HardwareInterface::flush()
1822{
1823
1824    unsigned int frameNum = 0;
1825    camera3_notify_msg_t notify_msg;
1826    camera3_capture_result_t result;
1827    camera3_stream_buffer_t pStream_Buf;
1828
1829    ALOGV("%s: Unblocking Process Capture Request", __func__);
1830
1831    // Stop the Streams/Channels
1832    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1833        it != mStreamInfo.end(); it++) {
1834        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1835        channel->stop();
1836        (*it)->status = INVALID;
1837    }
1838
1839    if (mMetadataChannel) {
1840        /* If content of mStreamInfo is not 0, there is metadata stream */
1841        mMetadataChannel->stop();
1842    }
1843
1844    // Mutex Lock
1845    pthread_mutex_lock(&mMutex);
1846
1847    // Unblock process_capture_request
1848    mPendingRequest = 0;
1849    pthread_cond_signal(&mRequestCond);
1850
1851    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1852    frameNum = i->frame_number;
1853    ALOGV("%s: Latest frame num on  mPendingRequestsList = %d",
1854      __func__, frameNum);
1855
1856    // Go through the pending buffers and send buffer errors
1857    for (List<PendingBufferInfo>::iterator k =
1858         mPendingBuffersMap.mPendingBufferList.begin();
1859         k != mPendingBuffersMap.mPendingBufferList.end();  ) {
1860         ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1861          __func__, k->frame_number, k->buffer, k->stream,
1862          k->stream->format);
1863
1864        if (k->frame_number < frameNum) {
1865            // Send Error notify to frameworks for each buffer for which
1866            // metadata buffer is already sent
1867            ALOGV("%s: Sending ERROR BUFFER for frame %d, buffer %p",
1868              __func__, k->frame_number, k->buffer);
1869
1870            notify_msg.type = CAMERA3_MSG_ERROR;
1871            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
1872            notify_msg.message.error.error_stream = k->stream;
1873            notify_msg.message.error.frame_number = k->frame_number;
1874            mCallbackOps->notify(mCallbackOps, &notify_msg);
1875            ALOGV("%s: notify frame_number = %d", __func__,
1876                    i->frame_number);
1877
1878            pStream_Buf.acquire_fence = -1;
1879            pStream_Buf.release_fence = -1;
1880            pStream_Buf.buffer = k->buffer;
1881            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1882            pStream_Buf.stream = k->stream;
1883
1884            result.result = NULL;
1885            result.frame_number = k->frame_number;
1886            result.num_output_buffers = 1;
1887            result.output_buffers = &pStream_Buf ;
1888            mCallbackOps->process_capture_result(mCallbackOps, &result);
1889
1890            mPendingBuffersMap.num_buffers--;
1891            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1892        }
1893        else {
1894          k++;
1895        }
1896    }
1897
1898    ALOGV("%s:Sending ERROR REQUEST for all pending requests", __func__);
1899
1900    // Go through the pending requests info and send error request to framework
1901    for (i = mPendingRequestsList.begin(); i != mPendingRequestsList.end(); ) {
1902        int numBuffers = 0;
1903        ALOGV("%s:Sending ERROR REQUEST for frame %d",
1904              __func__, i->frame_number);
1905
1906        // Send shutter notify to frameworks
1907        notify_msg.type = CAMERA3_MSG_ERROR;
1908        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
1909        notify_msg.message.error.error_stream = NULL;
1910        notify_msg.message.error.frame_number = i->frame_number;
1911        mCallbackOps->notify(mCallbackOps, &notify_msg);
1912
1913        result.frame_number = i->frame_number;
1914        result.num_output_buffers = 0;
1915        result.output_buffers = NULL;
1916        numBuffers = 0;
1917
1918        for (List<PendingBufferInfo>::iterator k =
1919             mPendingBuffersMap.mPendingBufferList.begin();
1920             k != mPendingBuffersMap.mPendingBufferList.end(); ) {
1921          if (k->frame_number == i->frame_number) {
1922            ALOGV("%s: Sending Error for frame = %d, buffer = %p,"
1923                   " stream = %p, stream format = %d",__func__,
1924                   k->frame_number, k->buffer, k->stream, k->stream->format);
1925
1926            pStream_Buf.acquire_fence = -1;
1927            pStream_Buf.release_fence = -1;
1928            pStream_Buf.buffer = k->buffer;
1929            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1930            pStream_Buf.stream = k->stream;
1931
1932            result.num_output_buffers = 1;
1933            result.output_buffers = &pStream_Buf;
1934            result.result = NULL;
1935            result.frame_number = i->frame_number;
1936
1937            mCallbackOps->process_capture_result(mCallbackOps, &result);
1938            mPendingBuffersMap.num_buffers--;
1939            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1940            numBuffers++;
1941          }
1942          else {
1943            k++;
1944          }
1945        }
1946        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1947              __func__, mPendingBuffersMap.num_buffers);
1948
1949        i = mPendingRequestsList.erase(i);
1950    }
1951
1952    /* Reset pending buffer list and requests list */
1953    mPendingRequestsList.clear();
1954    /* Reset pending frame Drop list and requests list */
1955    mPendingFrameDropList.clear();
1956
1957    mPendingBuffersMap.num_buffers = 0;
1958    mPendingBuffersMap.mPendingBufferList.clear();
1959    ALOGV("%s: Cleared all the pending buffers ", __func__);
1960
1961    /*flush the metadata list*/
1962    if (!mStoredMetadataList.empty()) {
1963        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1964              m != mStoredMetadataList.end(); ) {
1965            mMetadataChannel->bufDone(m->meta_buf);
1966            free(m->meta_buf);
1967            m = mStoredMetadataList.erase(m);
1968        }
1969    }
1970    ALOGV("%s: Flushing the metadata list done!! ", __func__);
1971
1972    mFirstRequest = true;
1973    pthread_mutex_unlock(&mMutex);
1974    return 0;
1975}
1976
1977/*===========================================================================
1978 * FUNCTION   : captureResultCb
1979 *
1980 * DESCRIPTION: Callback handler for all capture result
1981 *              (streams, as well as metadata)
1982 *
1983 * PARAMETERS :
1984 *   @metadata : metadata information
1985 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1986 *               NULL if metadata.
1987 *
1988 * RETURN     : NONE
1989 *==========================================================================*/
1990void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1991                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1992{
1993    pthread_mutex_lock(&mMutex);
1994
1995    if (metadata_buf)
1996        handleMetadataWithLock(metadata_buf);
1997    else
1998        handleBufferWithLock(buffer, frame_number);
1999
2000    pthread_mutex_unlock(&mMutex);
2001    return;
2002}
2003
2004/*===========================================================================
2005 * FUNCTION   : translateCbMetadataToResultMetadata
2006 *
2007 * DESCRIPTION:
2008 *
2009 * PARAMETERS :
2010 *   @metadata : metadata information from callback
2011 *
2012 * RETURN     : camera_metadata_t*
2013 *              metadata in a format specified by fwk
2014 *==========================================================================*/
2015camera_metadata_t*
2016QCamera3HardwareInterface::translateCbMetadataToResultMetadata
2017                                (metadata_buffer_t *metadata, nsecs_t timestamp,
2018                                 int32_t request_id, int32_t BlobRequest,
2019                                 jpeg_settings_t* inputjpegsettings,
2020                                 uint32_t frameNumber)
2021{
2022    CameraMetadata camMetadata;
2023    camera_metadata_t* resultMetadata;
2024
2025    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
2026    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
2027
2028    // Update the JPEG related info
2029    if (BlobRequest) {
2030        camMetadata.update(ANDROID_JPEG_ORIENTATION, &(inputjpegsettings->jpeg_orientation), 1);
2031        camMetadata.update(ANDROID_JPEG_QUALITY, &(inputjpegsettings->jpeg_quality), 1);
2032
2033        int32_t thumbnailSizeTable[2];
2034        thumbnailSizeTable[0] = inputjpegsettings->thumbnail_size.width;
2035        thumbnailSizeTable[1] = inputjpegsettings->thumbnail_size.height;
2036        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSizeTable, 2);
2037        ALOGV("%s: Orien=%d, quality=%d wid=%d, height=%d", __func__, inputjpegsettings->jpeg_orientation,
2038               inputjpegsettings->jpeg_quality,thumbnailSizeTable[0], thumbnailSizeTable[1]);
2039
2040        if (inputjpegsettings->gps_coordinates[0]) {
2041            double gpsCoordinates[3];
2042            gpsCoordinates[0]=*(inputjpegsettings->gps_coordinates[0]);
2043            gpsCoordinates[1]=*(inputjpegsettings->gps_coordinates[1]);
2044            gpsCoordinates[2]=*(inputjpegsettings->gps_coordinates[2]);
2045            camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
2046            ALOGV("%s: gpsCoordinates[0]=%f, 1=%f 2=%f", __func__, gpsCoordinates[0],
2047                 gpsCoordinates[1],gpsCoordinates[2]);
2048        }
2049
2050        if (inputjpegsettings->gps_timestamp) {
2051            camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, inputjpegsettings->gps_timestamp, 1);
2052            ALOGV("%s: gps_timestamp=%lld", __func__, *(inputjpegsettings->gps_timestamp));
2053        }
2054
2055        String8 str(inputjpegsettings->gps_processing_method);
2056        if (strlen(mJpegSettings->gps_processing_method) > 0) {
2057            camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
2058        }
2059
2060        //Dump tuning metadata if enabled and available
2061        char prop[PROPERTY_VALUE_MAX];
2062        memset(prop, 0, sizeof(prop));
2063        property_get("persist.camera.dumpmetadata", prop, "0");
2064        int32_t enabled = atoi(prop);
2065        if (enabled && metadata->is_tuning_params_valid) {
2066            dumpMetadataToFile(metadata->tuning_params,
2067                               mMetaFrameCount,
2068                               enabled,
2069                               "Snapshot",
2070                               frameNumber);
2071        }
2072    }
2073    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
2074    uint8_t next_entry;
2075    while (curr_entry != CAM_INTF_PARM_MAX) {
2076       switch (curr_entry) {
2077         case CAM_INTF_META_FACE_DETECTION:{
2078             cam_face_detection_data_t *faceDetectionInfo =
2079                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
2080             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
2081             int32_t faceIds[MAX_ROI];
2082             uint8_t faceScores[MAX_ROI];
2083             int32_t faceRectangles[MAX_ROI * 4];
2084             int32_t faceLandmarks[MAX_ROI * 6];
2085             int j = 0, k = 0;
2086             for (int i = 0; i < numFaces; i++) {
2087                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
2088                 faceScores[i] = faceDetectionInfo->faces[i].score;
2089                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
2090                         faceRectangles+j, -1);
2091                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
2092                 j+= 4;
2093                 k+= 6;
2094             }
2095
2096             if (numFaces <= 0) {
2097                memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
2098                memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
2099                memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
2100                memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
2101             }
2102
2103             camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
2104             camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
2105             camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
2106               faceRectangles, numFaces*4);
2107             camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
2108               faceLandmarks, numFaces*6);
2109
2110            break;
2111            }
2112         case CAM_INTF_META_COLOR_CORRECT_MODE:{
2113             uint8_t  *color_correct_mode =
2114                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
2115             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
2116             break;
2117          }
2118
2119         // 3A state is sent in urgent partial result (uses quirk)
2120         case CAM_INTF_META_AEC_PRECAPTURE_ID:
2121         case CAM_INTF_META_AEC_ROI:
2122         case CAM_INTF_META_AEC_STATE:
2123         case CAM_INTF_PARM_FOCUS_MODE:
2124         case CAM_INTF_META_AF_ROI:
2125         case CAM_INTF_META_AF_STATE:
2126         case CAM_INTF_META_AF_TRIGGER_ID:
2127         case CAM_INTF_PARM_WHITE_BALANCE:
2128         case CAM_INTF_META_AWB_REGIONS:
2129         case CAM_INTF_META_AWB_STATE:
2130         case CAM_INTF_META_MODE: {
2131           ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
2132           break;
2133         }
2134
2135          case CAM_INTF_META_EDGE_MODE: {
2136             cam_edge_application_t  *edgeApplication =
2137                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
2138             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
2139             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
2140             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
2141             break;
2142          }
2143          case CAM_INTF_META_FLASH_POWER: {
2144             uint8_t  *flashPower =
2145                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
2146             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
2147             break;
2148          }
2149          case CAM_INTF_META_FLASH_FIRING_TIME: {
2150             int64_t  *flashFiringTime =
2151                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
2152             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
2153             break;
2154          }
2155          case CAM_INTF_META_FLASH_STATE: {
2156             uint8_t  *flashState =
2157                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
2158             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
2159             break;
2160          }
2161          case CAM_INTF_META_FLASH_MODE:{
2162             uint8_t *flashMode = (uint8_t*)
2163                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
2164             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
2165             break;
2166          }
2167          case CAM_INTF_META_HOTPIXEL_MODE: {
2168              uint8_t  *hotPixelMode =
2169                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
2170              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
2171              break;
2172          }
2173          case CAM_INTF_META_LENS_APERTURE:{
2174             float  *lensAperture =
2175                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
2176             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
2177             break;
2178          }
2179          case CAM_INTF_META_LENS_FILTERDENSITY: {
2180             float  *filterDensity =
2181                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
2182             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
2183             break;
2184          }
2185          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
2186             float  *focalLength =
2187                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2188             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
2189             break;
2190          }
2191          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
2192             float  *focusDistance =
2193                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
2194             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
2195             break;
2196          }
2197          case CAM_INTF_META_LENS_FOCUS_RANGE: {
2198             float  *focusRange =
2199                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
2200             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
2201             break;
2202          }
2203          case CAM_INTF_META_LENS_STATE: {
2204             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
2205             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
2206             break;
2207          }
2208          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
2209             uint8_t  *opticalStab =
2210                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
2211             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
2212             break;
2213          }
2214          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
2215             uint8_t  *noiseRedMode =
2216                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
2217             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
2218             break;
2219          }
2220          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
2221             uint8_t  *noiseRedStrength =
2222                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
2223             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
2224             break;
2225          }
2226          case CAM_INTF_META_SCALER_CROP_REGION: {
2227             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
2228             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
2229             int32_t scalerCropRegion[4];
2230             scalerCropRegion[0] = hScalerCropRegion->left;
2231             scalerCropRegion[1] = hScalerCropRegion->top;
2232             scalerCropRegion[2] = hScalerCropRegion->width;
2233             scalerCropRegion[3] = hScalerCropRegion->height;
2234             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
2235             break;
2236          }
2237          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
2238             int64_t  *sensorExpTime =
2239                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2240             mMetadataResponse.exposure_time = *sensorExpTime;
2241             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
2242             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
2243             break;
2244          }
2245          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
2246             int64_t  *sensorFameDuration =
2247                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
2248             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
2249             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
2250             break;
2251          }
2252          case CAM_INTF_META_SENSOR_SENSITIVITY:{
2253             int32_t  *sensorSensitivity =
2254                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
2255             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
2256             mMetadataResponse.iso_speed = *sensorSensitivity;
2257             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
2258             break;
2259          }
2260          case CAM_INTF_META_SHADING_MODE: {
2261             uint8_t  *shadingMode =
2262                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
2263             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
2264             break;
2265          }
2266          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
2267             uint8_t  *faceDetectMode =
2268                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
2269             uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
2270                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
2271                                                        *faceDetectMode);
2272             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
2273             break;
2274          }
2275          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
2276             uint8_t  *histogramMode =
2277                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
2278             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
2279             break;
2280          }
2281          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
2282               uint8_t  *sharpnessMapMode =
2283                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
2284               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
2285                                  sharpnessMapMode, 1);
2286               break;
2287           }
2288          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
2289               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
2290               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
2291               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
2292                                  (int32_t*)sharpnessMap->sharpness,
2293                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
2294               break;
2295          }
2296          case CAM_INTF_META_LENS_SHADING_MAP: {
2297               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2298               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2299               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2300               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
2301               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2302                                  (float*)lensShadingMap->lens_shading,
2303                                  4*map_width*map_height);
2304               break;
2305          }
2306
2307          case CAM_INTF_META_TONEMAP_MODE: {
2308             uint8_t  *toneMapMode =
2309                (uint8_t *)POINTER_OF(CAM_INTF_META_TONEMAP_MODE, metadata);
2310             camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
2311             break;
2312          }
2313
2314          case CAM_INTF_META_TONEMAP_CURVES:{
2315             //Populate CAM_INTF_META_TONEMAP_CURVES
2316             /* ch0 = G, ch 1 = B, ch 2 = R*/
2317             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2318             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
2319             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
2320                                (float*)tonemap->curves[0].tonemap_points,
2321                                tonemap->tonemap_points_cnt * 2);
2322
2323             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
2324                                (float*)tonemap->curves[1].tonemap_points,
2325                                tonemap->tonemap_points_cnt * 2);
2326
2327             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
2328                                (float*)tonemap->curves[2].tonemap_points,
2329                                tonemap->tonemap_points_cnt * 2);
2330             break;
2331          }
2332          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
2333             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
2334             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
2335             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
2336             break;
2337          }
2338          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
2339              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
2340              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
2341              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
2342                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
2343              break;
2344          }
2345          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
2346             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
2347             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
2348             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
2349                       predColorCorrectionGains->gains, 4);
2350             break;
2351          }
2352          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
2353             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
2354                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2355             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2356                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2357             break;
2358
2359          }
2360          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
2361             uint8_t *blackLevelLock = (uint8_t*)
2362               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2363             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2364             break;
2365          }
2366          case CAM_INTF_META_SCENE_FLICKER:{
2367             uint8_t *sceneFlicker = (uint8_t*)
2368             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2369             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2370             break;
2371          }
2372          case CAM_INTF_PARM_LED_MODE:
2373             break;
2374          case CAM_INTF_PARM_EFFECT: {
2375             uint8_t *effectMode = (uint8_t*)
2376                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2377             uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
2378                                                    sizeof(EFFECT_MODES_MAP),
2379                                                    *effectMode);
2380             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2381             break;
2382          }
2383          case CAM_INTF_META_TEST_PATTERN_DATA: {
2384             cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
2385                 POINTER_OF(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
2386             int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
2387                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
2388                     testPatternData->mode);
2389             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
2390                     &fwk_testPatternMode, 1);
2391             break;
2392          }
2393          default:
2394             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2395                   __func__, curr_entry);
2396             break;
2397       }
2398       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2399       curr_entry = next_entry;
2400    }
2401
2402    int32_t hotPixelMap[2];
2403    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
2404
2405    resultMetadata = camMetadata.release();
2406    return resultMetadata;
2407}
2408
2409/*===========================================================================
2410 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
2411 *
 * DESCRIPTION: Translates the urgent (3A-related) HAL metadata entries —
 *              AE/AF/AWB states, modes, trigger IDs and regions — into a
 *              framework partial-result metadata buffer.
2413 *
2414 * PARAMETERS :
2415 *   @metadata : metadata information from callback
2416 *
2417 * RETURN     : camera_metadata_t*
2418 *              metadata in a format specified by fwk
2419 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata) {

    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Tag this buffer as a partial result: the framework will receive the
    // remaining (non-urgent) metadata for the same frame in a later buffer.
    uint8_t partial_result_tag = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
    camMetadata.update(ANDROID_QUIRKS_PARTIAL_RESULT, &partial_result_tag, 1);

    // Walk every entry present in the HAL metadata buffer; only the
    // 3A-related tags below are translated here, everything else is left
    // for the normal (non-urgent) translation path (see the default case).
    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
    uint8_t next_entry;
    while (curr_entry != CAM_INTF_PARM_MAX) {
      switch (curr_entry) {
        case CAM_INTF_META_AEC_PRECAPTURE_ID: {
            int32_t  *ae_precapture_id =
              (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                                          ae_precapture_id, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID", __func__);
          break;
        }
        case CAM_INTF_META_AEC_ROI: {
            // AE region: converted to the framework's
            // [xmin, ymin, xmax, ymax, weight] layout by convertToRegions().
            cam_area_t  *hAeRegions =
                (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
            int32_t aeRegions[5];
            convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
            camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_REGIONS", __func__);
            break;
        }
        case CAM_INTF_META_AEC_STATE:{
            uint8_t *ae_state =
                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
            break;
        }
        case CAM_INTF_PARM_FOCUS_MODE:{
            // HAL focus-mode enum -> framework AF-mode enum via lookup table.
            uint8_t  *focusMode =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
            uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
            break;
        }
        case CAM_INTF_META_AF_ROI:{
            /*af regions*/
            cam_area_t  *hAfRegions =
                (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
            int32_t afRegions[5];
            convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
            camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_REGIONS", __func__);
            break;
        }
        case CAM_INTF_META_AF_STATE: {
            uint8_t  *afState =
               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
            break;
        }
        case CAM_INTF_META_AF_TRIGGER_ID: {
            int32_t  *afTriggerId =
                 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID", __func__);
            break;
        }
        case CAM_INTF_PARM_WHITE_BALANCE: {
           // HAL white-balance enum -> framework AWB-mode enum.
           uint8_t  *whiteBalance =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
             uint8_t fwkWhiteBalanceMode =
                    (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
                    sizeof(WHITE_BALANCE_MODES_MAP)/
                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
                 &fwkWhiteBalanceMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
             break;
        }
        case CAM_INTF_META_AWB_REGIONS: {
           /*awb regions*/
           cam_area_t  *hAwbRegions =
               (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
           int32_t awbRegions[5];
           convertToRegions(hAwbRegions->rect, awbRegions,hAwbRegions->weight);
           camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_REGIONS", __func__);
           break;
        }
        case CAM_INTF_META_AWB_STATE: {
           uint8_t  *whiteBalanceState =
              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
           break;
        }
        case CAM_INTF_META_MODE: {
            uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
            camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_MODE", __func__);
            break;
        }
        default:
            // Not an urgent tag; handled by the normal metadata translation.
            ALOGV("%s: Normal Metadata %d, do not process",
              __func__, curr_entry);
       }
       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
       curr_entry = next_entry;
    }
    // Hand ownership of the packed buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
2536
2537/*===========================================================================
2538 * FUNCTION   : dumpMetadataToFile
2539 *
2540 * DESCRIPTION: Dumps tuning metadata to file system
2541 *
2542 * PARAMETERS :
2543 *   @meta           : tuning metadata
2544 *   @dumpFrameCount : current dump frame count
2545 *   @enabled        : Enable mask
2546 *
2547 *==========================================================================*/
2548void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
2549                                                   uint32_t &dumpFrameCount,
2550                                                   int32_t enabled,
2551                                                   const char *type,
2552                                                   uint32_t frameNumber)
2553{
2554    uint32_t frm_num = 0;
2555
2556    //Some sanity checks
2557    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
2558        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
2559              __func__,
2560              meta.tuning_sensor_data_size,
2561              TUNING_SENSOR_DATA_MAX);
2562        return;
2563    }
2564
2565    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
2566        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
2567              __func__,
2568              meta.tuning_vfe_data_size,
2569              TUNING_VFE_DATA_MAX);
2570        return;
2571    }
2572
2573    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
2574        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
2575              __func__,
2576              meta.tuning_cpp_data_size,
2577              TUNING_CPP_DATA_MAX);
2578        return;
2579    }
2580
2581    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
2582        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
2583              __func__,
2584              meta.tuning_cac_data_size,
2585              TUNING_CAC_DATA_MAX);
2586        return;
2587    }
2588    //
2589
2590    if(enabled){
2591        frm_num = ((enabled & 0xffff0000) >> 16);
2592        if(frm_num == 0) {
2593            frm_num = 10; //default 10 frames
2594        }
2595        if(frm_num > 256) {
2596            frm_num = 256; //256 buffers cycle around
2597        }
2598        if((frm_num == 256) && (dumpFrameCount >= frm_num)) {
2599            // reset frame count if cycling
2600            dumpFrameCount = 0;
2601        }
2602        ALOGV("DumpFrmCnt = %d, frm_num = %d",dumpFrameCount, frm_num);
2603        if (dumpFrameCount < frm_num) {
2604            char timeBuf[FILENAME_MAX];
2605            char buf[FILENAME_MAX];
2606            memset(buf, 0, sizeof(buf));
2607            memset(timeBuf, 0, sizeof(timeBuf));
2608            time_t current_time;
2609            struct tm * timeinfo;
2610            time (&current_time);
2611            timeinfo = localtime (&current_time);
2612            strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
2613            String8 filePath(timeBuf);
2614            snprintf(buf,
2615                     sizeof(buf),
2616                     "%d_HAL_META_%s_%d.bin",
2617                     dumpFrameCount,
2618                     type,
2619                     frameNumber);
2620            filePath.append(buf);
2621            int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2622            if (file_fd > 0) {
2623                int written_len = 0;
2624                meta.tuning_data_version = TUNING_DATA_VERSION;
2625                void *data = (void *)((uint8_t *)&meta.tuning_data_version);
2626                written_len += write(file_fd, data, sizeof(uint32_t));
2627                data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
2628                ALOGV("tuning_sensor_data_size %d",(int)(*(int *)data));
2629                written_len += write(file_fd, data, sizeof(uint32_t));
2630                data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
2631                ALOGV("tuning_vfe_data_size %d",(int)(*(int *)data));
2632                written_len += write(file_fd, data, sizeof(uint32_t));
2633                data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
2634                ALOGV("tuning_cpp_data_size %d",(int)(*(int *)data));
2635                written_len += write(file_fd, data, sizeof(uint32_t));
2636                data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
2637                ALOGV("tuning_cac_data_size %d",(int)(*(int *)data));
2638                written_len += write(file_fd, data, sizeof(uint32_t));
2639                int total_size = meta.tuning_sensor_data_size;
2640                data = (void *)((uint8_t *)&meta.data);
2641                written_len += write(file_fd, data, total_size);
2642                total_size = meta.tuning_vfe_data_size;
2643                data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
2644                written_len += write(file_fd, data, total_size);
2645                total_size = meta.tuning_cpp_data_size;
2646                data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
2647                written_len += write(file_fd, data, total_size);
2648                total_size = meta.tuning_cac_data_size;
2649                data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
2650                written_len += write(file_fd, data, total_size);
2651                close(file_fd);
2652            }else {
2653                ALOGE("%s: fail t open file for image dumping", __func__);
2654            }
2655            dumpFrameCount++;
2656        }
2657    }
2658}
2659
2660/*===========================================================================
2661 * FUNCTION   : convertToRegions
2662 *
2663 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2664 *
2665 * PARAMETERS :
2666 *   @rect   : cam_rect_t struct to convert
2667 *   @region : int32_t destination array
2668 *   @weight : if we are converting from cam_area_t, weight is valid
2669 *             else weight = -1
2670 *
2671 *==========================================================================*/
2672void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2673    region[0] = rect.left;
2674    region[1] = rect.top;
2675    region[2] = rect.left + rect.width;
2676    region[3] = rect.top + rect.height;
2677    if (weight > -1) {
2678        region[4] = weight;
2679    }
2680}
2681
2682/*===========================================================================
2683 * FUNCTION   : convertFromRegions
2684 *
2685 * DESCRIPTION: helper method to convert from array to cam_rect_t
2686 *
2687 * PARAMETERS :
 *   @roi      : destination cam_area_t to fill in
 *   @settings : frame settings metadata to read the region tag from
 *   @tag      : metadata tag holding [xmin, ymin, xmax, ymax, weight]
2692 *
2693 *==========================================================================*/
2694void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2695                                                   const camera_metadata_t *settings,
2696                                                   uint32_t tag){
2697    CameraMetadata frame_settings;
2698    frame_settings = settings;
2699    int32_t x_min = frame_settings.find(tag).data.i32[0];
2700    int32_t y_min = frame_settings.find(tag).data.i32[1];
2701    int32_t x_max = frame_settings.find(tag).data.i32[2];
2702    int32_t y_max = frame_settings.find(tag).data.i32[3];
2703    roi->weight = frame_settings.find(tag).data.i32[4];
2704    roi->rect.left = x_min;
2705    roi->rect.top = y_min;
2706    roi->rect.width = x_max - x_min;
2707    roi->rect.height = y_max - y_min;
2708}
2709
2710/*===========================================================================
2711 * FUNCTION   : resetIfNeededROI
2712 *
2713 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2714 *              crop region
2715 *
2716 * PARAMETERS :
2717 *   @roi       : cam_area_t struct to resize
2718 *   @scalerCropRegion : cam_crop_region_t region to compare against
2719 *
2720 *
2721 *==========================================================================*/
2722bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2723                                                 const cam_crop_region_t* scalerCropRegion)
2724{
2725    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2726    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2727    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2728    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2729    if ((roi_x_max < scalerCropRegion->left) ||
2730        (roi_y_max < scalerCropRegion->top)  ||
2731        (roi->rect.left > crop_x_max) ||
2732        (roi->rect.top > crop_y_max)){
2733        return false;
2734    }
2735    if (roi->rect.left < scalerCropRegion->left) {
2736        roi->rect.left = scalerCropRegion->left;
2737    }
2738    if (roi->rect.top < scalerCropRegion->top) {
2739        roi->rect.top = scalerCropRegion->top;
2740    }
2741    if (roi_x_max > crop_x_max) {
2742        roi_x_max = crop_x_max;
2743    }
2744    if (roi_y_max > crop_y_max) {
2745        roi_y_max = crop_y_max;
2746    }
2747    roi->rect.width = roi_x_max - roi->rect.left;
2748    roi->rect.height = roi_y_max - roi->rect.top;
2749    return true;
2750}
2751
2752/*===========================================================================
2753 * FUNCTION   : convertLandmarks
2754 *
2755 * DESCRIPTION: helper method to extract the landmarks from face detection info
2756 *
2757 * PARAMETERS :
 *   @face   : cam_face_detection_info_t to extract landmark points from
2759 *   @landmarks : int32_t destination array
2760 *
2761 *
2762 *==========================================================================*/
2763void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2764{
2765    landmarks[0] = face.left_eye_center.x;
2766    landmarks[1] = face.left_eye_center.y;
2767    landmarks[2] = face.right_eye_center.x;
2768    landmarks[3] = face.right_eye_center.y;
2769    landmarks[4] = face.mouth_center.x;
2770    landmarks[5] = face.mouth_center.y;
2771}
2772
2773#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2774/*===========================================================================
2775 * FUNCTION   : initCapabilities
2776 *
2777 * DESCRIPTION: initialize camera capabilities in static data struct
2778 *
2779 * PARAMETERS :
2780 *   @cameraId  : camera Id
2781 *
2782 * RETURN     : int32_t type of status
2783 *              NO_ERROR  -- success
2784 *              none-zero failure code
2785 *==========================================================================*/
2786int QCamera3HardwareInterface::initCapabilities(int cameraId)
2787{
2788    int rc = 0;
2789    mm_camera_vtbl_t *cameraHandle = NULL;
2790    QCamera3HeapMemory *capabilityHeap = NULL;
2791
2792    cameraHandle = camera_open(cameraId);
2793    if (!cameraHandle) {
2794        ALOGE("%s: camera_open failed", __func__);
2795        rc = -1;
2796        goto open_failed;
2797    }
2798
2799    capabilityHeap = new QCamera3HeapMemory();
2800    if (capabilityHeap == NULL) {
2801        ALOGE("%s: creation of capabilityHeap failed", __func__);
2802        goto heap_creation_failed;
2803    }
2804    /* Allocate memory for capability buffer */
2805    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2806    if(rc != OK) {
2807        ALOGE("%s: No memory for cappability", __func__);
2808        goto allocate_failed;
2809    }
2810
2811    /* Map memory for capability buffer */
2812    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2813    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2814                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2815                                capabilityHeap->getFd(0),
2816                                sizeof(cam_capability_t));
2817    if(rc < 0) {
2818        ALOGE("%s: failed to map capability buffer", __func__);
2819        goto map_failed;
2820    }
2821
2822    /* Query Capability */
2823    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2824    if(rc < 0) {
2825        ALOGE("%s: failed to query capability",__func__);
2826        goto query_failed;
2827    }
2828    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2829    if (!gCamCapability[cameraId]) {
2830        ALOGE("%s: out of memory", __func__);
2831        goto query_failed;
2832    }
2833    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2834                                        sizeof(cam_capability_t));
2835    rc = 0;
2836
2837query_failed:
2838    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2839                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2840map_failed:
2841    capabilityHeap->deallocate();
2842allocate_failed:
2843    delete capabilityHeap;
2844heap_creation_failed:
2845    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2846    cameraHandle = NULL;
2847open_failed:
2848    return rc;
2849}
2850
2851/*===========================================================================
2852 * FUNCTION   : initParameters
2853 *
2854 * DESCRIPTION: initialize camera parameters
2855 *
2856 * PARAMETERS :
2857 *
2858 * RETURN     : int32_t type of status
2859 *              NO_ERROR  -- success
2860 *              none-zero failure code
2861 *==========================================================================*/
2862int QCamera3HardwareInterface::initParameters()
2863{
2864    int rc = 0;
2865
2866    //Allocate Set Param Buffer
2867    mParamHeap = new QCamera3HeapMemory();
2868    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2869    if(rc != OK) {
2870        rc = NO_MEMORY;
2871        ALOGE("Failed to allocate SETPARM Heap memory");
2872        delete mParamHeap;
2873        mParamHeap = NULL;
2874        return rc;
2875    }
2876
2877    //Map memory for parameters buffer
2878    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2879            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2880            mParamHeap->getFd(0),
2881            sizeof(parm_buffer_t));
2882    if(rc < 0) {
2883        ALOGE("%s:failed to map SETPARM buffer",__func__);
2884        rc = FAILED_TRANSACTION;
2885        mParamHeap->deallocate();
2886        delete mParamHeap;
2887        mParamHeap = NULL;
2888        return rc;
2889    }
2890
2891    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2892    return rc;
2893}
2894
2895/*===========================================================================
2896 * FUNCTION   : deinitParameters
2897 *
2898 * DESCRIPTION: de-initialize camera parameters
2899 *
2900 * PARAMETERS :
2901 *
2902 * RETURN     : NONE
2903 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tear down in reverse order of initParameters(): the buffer must be
    // unmapped from the camera backend before the backing heap is freed.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // Release and destroy the heap that backed the parameter buffer.
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap just freed; clear the now-dangling pointer.
    mParameters = NULL;
}
2915
2916/*===========================================================================
2917 * FUNCTION   : calcMaxJpegSize
2918 *
2919 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2920 *
2921 * PARAMETERS :
2922 *
2923 * RETURN     : max_jpeg_size
2924 *==========================================================================*/
2925int QCamera3HardwareInterface::calcMaxJpegSize()
2926{
2927    int32_t max_jpeg_size = 0;
2928    int temp_width, temp_height;
2929    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2930        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2931        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2932        if (temp_width * temp_height > max_jpeg_size ) {
2933            max_jpeg_size = temp_width * temp_height;
2934        }
2935    }
2936    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2937    return max_jpeg_size;
2938}
2939
2940/*===========================================================================
2941 * FUNCTION   : initStaticMetadata
2942 *
2943 * DESCRIPTION: initialize the static metadata
2944 *
2945 * PARAMETERS :
2946 *   @cameraId  : camera Id
2947 *
2948 * RETURN     : int32_t type of status
2949 *              0  -- success
2950 *              non-zero failure code
2951 *==========================================================================*/
2952int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2953{
2954    int rc = 0;
2955    CameraMetadata staticInfo;
2956
2957    /* android.info: hardware level */
2958    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2959    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2960        &supportedHardwareLevel, 1);
2961
2962    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2963    /*HAL 3 only*/
2964    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2965                    &gCamCapability[cameraId]->min_focus_distance, 1);
2966
2967    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2968                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2969
2970    /*should be using focal lengths but sensor doesn't provide that info now*/
2971    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2972                      &gCamCapability[cameraId]->focal_length,
2973                      1);
2974
2975    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2976                      gCamCapability[cameraId]->apertures,
2977                      gCamCapability[cameraId]->apertures_count);
2978
2979    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2980                gCamCapability[cameraId]->filter_densities,
2981                gCamCapability[cameraId]->filter_densities_count);
2982
2983
2984    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2985                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2986                      gCamCapability[cameraId]->optical_stab_modes_count);
2987
2988    staticInfo.update(ANDROID_LENS_POSITION,
2989                      gCamCapability[cameraId]->lens_position,
2990                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2991
2992    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2993                                       gCamCapability[cameraId]->lens_shading_map_size.height};
2994    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2995                      lens_shading_map_size,
2996                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2997
2998    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2999            gCamCapability[cameraId]->sensor_physical_size, 2);
3000
3001    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
3002            gCamCapability[cameraId]->exposure_time_range, 2);
3003
3004    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
3005            &gCamCapability[cameraId]->max_frame_duration, 1);
3006
3007    camera_metadata_rational baseGainFactor = {
3008            gCamCapability[cameraId]->base_gain_factor.numerator,
3009            gCamCapability[cameraId]->base_gain_factor.denominator};
3010    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
3011                      &baseGainFactor, 1);
3012
3013    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
3014                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
3015
3016    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
3017                                  gCamCapability[cameraId]->pixel_array_size.height};
3018    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
3019                      pixel_array_size, 2);
3020
3021    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
3022                                                gCamCapability[cameraId]->active_array_size.top,
3023                                                gCamCapability[cameraId]->active_array_size.width,
3024                                                gCamCapability[cameraId]->active_array_size.height};
3025    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
3026                      active_array_size, 4);
3027
3028    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
3029            &gCamCapability[cameraId]->white_level, 1);
3030
3031    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
3032            gCamCapability[cameraId]->black_level_pattern, 4);
3033
3034    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
3035                      &gCamCapability[cameraId]->flash_charge_duration, 1);
3036
3037    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
3038                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
3039
3040    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
3041    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
3042                      (int32_t*)&maxFaces, 1);
3043
3044    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
3045                      &gCamCapability[cameraId]->histogram_size, 1);
3046
3047    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
3048            &gCamCapability[cameraId]->max_histogram_count, 1);
3049
3050    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
3051                                    gCamCapability[cameraId]->sharpness_map_size.height};
3052
3053    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
3054            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
3055
3056    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
3057            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
3058
3059
3060    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
3061                      &gCamCapability[cameraId]->raw_min_duration[0],
3062                       gCamCapability[cameraId]->supported_raw_dim_cnt);
3063
3064    int32_t scalar_formats[] = {
3065            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
3066            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
3067            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
3068            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
3069            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
3070    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
3071    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
3072                      scalar_formats,
3073                      scalar_formats_count);
3074
3075    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
3076    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
3077              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
3078              available_processed_sizes);
3079    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
3080                available_processed_sizes,
3081                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
3082
3083    int32_t available_raw_sizes[CAM_FORMAT_MAX * 2];
3084    makeTable(gCamCapability[cameraId]->raw_dim,
3085              gCamCapability[cameraId]->supported_raw_dim_cnt,
3086              available_raw_sizes);
3087    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
3088                available_raw_sizes,
3089                gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
3090
3091    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
3092                      &gCamCapability[cameraId]->picture_min_duration[0],
3093                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
3094
3095    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
3096    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
3097                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
3098                 available_fps_ranges);
3099    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3100            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
3101
3102    camera_metadata_rational exposureCompensationStep = {
3103            gCamCapability[cameraId]->exp_compensation_step.numerator,
3104            gCamCapability[cameraId]->exp_compensation_step.denominator};
3105    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
3106                      &exposureCompensationStep, 1);
3107
3108    /*TO DO*/
3109    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
3110    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3111                      availableVstabModes, sizeof(availableVstabModes));
3112
3113    /** Quirk for urgent 3A state until final interface is worked out */
3114    uint8_t usePartialResultQuirk = 1;
3115    staticInfo.update(ANDROID_QUIRKS_USE_PARTIAL_RESULT,
3116                      &usePartialResultQuirk, 1);
3117
3118    /*HAL 1 and HAL 3 common*/
3119    float maxZoom = 4;
3120    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3121            &maxZoom, 1);
3122
3123    int32_t max3aRegions[] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
3124    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
3125            max3aRegions, 3);
3126
3127    uint8_t availableFaceDetectModes[] = {
3128            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
3129            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
3130    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3131                      availableFaceDetectModes,
3132                      sizeof(availableFaceDetectModes));
3133
3134    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
3135                                           gCamCapability[cameraId]->exposure_compensation_max};
3136    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
3137            exposureCompensationRange,
3138            sizeof(exposureCompensationRange)/sizeof(int32_t));
3139
3140    uint8_t lensFacing = (facingBack) ?
3141            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
3142    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
3143
3144    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
3145                available_processed_sizes,
3146                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
3147
3148    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3149                      available_thumbnail_sizes,
3150                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
3151
3152    /*all sizes will be clubbed into this tag*/
3153    int32_t available_stream_configs_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt *
3154                                    sizeof(scalar_formats)/sizeof(int32_t) * 4;
3155    int32_t available_stream_configs[available_stream_configs_size];
3156    int idx = 0;
3157    for (int j = 0; j < scalar_formats_count; j++) {
3158        for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3159           available_stream_configs[idx] = scalar_formats[j];
3160           available_stream_configs[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3161           available_stream_configs[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3162           available_stream_configs[idx+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
3163           idx+=4;
3164        }
3165    }
3166
3167    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3168                      available_stream_configs,
3169                      available_stream_configs_size);
3170
3171
3172
3173    int32_t max_jpeg_size = 0;
3174    int temp_width, temp_height;
3175    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3176        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3177        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3178        if (temp_width * temp_height > max_jpeg_size ) {
3179            max_jpeg_size = temp_width * temp_height;
3180        }
3181    }
3182    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
3183    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
3184                      &max_jpeg_size, 1);
3185
3186    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
3187    size_t size = 0;
3188    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
3189        int32_t val = lookupFwkName(EFFECT_MODES_MAP,
3190                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
3191                                   gCamCapability[cameraId]->supported_effects[i]);
3192        if (val != NAME_NOT_FOUND) {
3193            avail_effects[size] = (uint8_t)val;
3194            size++;
3195        }
3196    }
3197    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
3198                      avail_effects,
3199                      size);
3200
3201    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
3202    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
3203    int32_t supported_scene_modes_cnt = 0;
3204    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
3205        int32_t val = lookupFwkName(SCENE_MODES_MAP,
3206                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3207                                gCamCapability[cameraId]->supported_scene_modes[i]);
3208        if (val != NAME_NOT_FOUND) {
3209            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
3210            supported_indexes[supported_scene_modes_cnt] = i;
3211            supported_scene_modes_cnt++;
3212        }
3213    }
3214
3215    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3216                      avail_scene_modes,
3217                      supported_scene_modes_cnt);
3218
3219    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
3220    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
3221                      supported_scene_modes_cnt,
3222                      scene_mode_overrides,
3223                      supported_indexes,
3224                      cameraId);
3225    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
3226                      scene_mode_overrides,
3227                      supported_scene_modes_cnt*3);
3228
3229    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
3230    size = 0;
3231    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
3232        int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
3233                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
3234                                 gCamCapability[cameraId]->supported_antibandings[i]);
3235        if (val != NAME_NOT_FOUND) {
3236            avail_antibanding_modes[size] = (uint8_t)val;
3237            size++;
3238        }
3239
3240    }
3241    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3242                      avail_antibanding_modes,
3243                      size);
3244
3245    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
3246    size = 0;
3247    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
3248        int32_t val = lookupFwkName(FOCUS_MODES_MAP,
3249                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3250                                gCamCapability[cameraId]->supported_focus_modes[i]);
3251        if (val != NAME_NOT_FOUND) {
3252            avail_af_modes[size] = (uint8_t)val;
3253            size++;
3254        }
3255    }
3256    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
3257                      avail_af_modes,
3258                      size);
3259
3260    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
3261    size = 0;
3262    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
3263        int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
3264                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3265                                    gCamCapability[cameraId]->supported_white_balances[i]);
3266        if (val != NAME_NOT_FOUND) {
3267            avail_awb_modes[size] = (uint8_t)val;
3268            size++;
3269        }
3270    }
3271    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
3272                      avail_awb_modes,
3273                      size);
3274
3275    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
3276    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
3277      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
3278
3279    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
3280            available_flash_levels,
3281            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
3282
3283    uint8_t flashAvailable;
3284    if (gCamCapability[cameraId]->flash_available)
3285        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
3286    else
3287        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
3288    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
3289            &flashAvailable, 1);
3290
3291    uint8_t avail_ae_modes[5];
3292    size = 0;
3293    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
3294        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
3295        size++;
3296    }
3297    if (flashAvailable) {
3298        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
3299        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
3300        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
3301    }
3302    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
3303                      avail_ae_modes,
3304                      size);
3305
3306    int32_t sensitivity_range[2];
3307    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
3308    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
3309    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
3310                      sensitivity_range,
3311                      sizeof(sensitivity_range) / sizeof(int32_t));
3312
3313    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3314                      &gCamCapability[cameraId]->max_analog_sensitivity,
3315                      1);
3316
3317    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
3318                      &gCamCapability[cameraId]->picture_min_duration[0],
3319                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
3320
3321    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
3322    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
3323                      &sensor_orientation,
3324                      1);
3325
3326    int32_t max_output_streams[3] = {1, 3, 1};
3327    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
3328                      max_output_streams,
3329                      3);
3330
3331    uint8_t avail_leds = 0;
3332    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
3333                      &avail_leds, 0);
3334
3335    uint8_t focus_dist_calibrated;
3336    int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
3337            sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
3338            gCamCapability[cameraId]->focus_dist_calibrated);
3339    if (val != NAME_NOT_FOUND) {
3340        focus_dist_calibrated = (uint8_t)val;
3341        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3342                     &focus_dist_calibrated, 1);
3343    }
3344
3345    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
3346    size = 0;
3347    for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
3348            i++) {
3349        int32_t val = lookupFwkName(TEST_PATTERN_MAP,
3350                                    sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
3351                                    gCamCapability[cameraId]->supported_test_pattern_modes[i]);
3352        if (val != NAME_NOT_FOUND) {
3353            avail_testpattern_modes[size] = val;
3354            size++;
3355        }
3356    }
3357    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3358                      avail_testpattern_modes,
3359                      size);
3360
3361    uint8_t max_pipeline_depth = kMaxInFlight;
3362    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
3363                      &max_pipeline_depth,
3364                      1);
3365
3366    int32_t partial_result_count = 2;
3367    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3368                      &partial_result_count,
3369                       1);
3370
3371    uint8_t available_capabilities[] =
3372        {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
3373         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
3374         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_GCAM};
3375    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3376                      available_capabilities,
3377                      3);
3378
3379    int32_t max_input_streams = 0;
3380    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3381                      &max_input_streams,
3382                      1);
3383
3384    int32_t io_format_map[] = {};
3385    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3386                      io_format_map, 0);
3387
3388    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
3389    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
3390                      &max_latency,
3391                      1);
3392
3393    float optical_axis_angle[2];
3394    optical_axis_angle[0] = 0; //need to verify
3395    optical_axis_angle[1] = 0; //need to verify
3396    staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
3397                      optical_axis_angle,
3398                      2);
3399
3400    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
3401    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3402                      available_hot_pixel_modes,
3403                      1);
3404
3405    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
3406                                      ANDROID_EDGE_MODE_FAST};
3407    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3408                      available_edge_modes,
3409                      2);
3410
3411    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
3412                                           ANDROID_NOISE_REDUCTION_MODE_FAST};
3413    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3414                      available_noise_red_modes,
3415                      2);
3416
3417    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
3418                                         ANDROID_TONEMAP_MODE_FAST,
3419                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
3420    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3421                      available_tonemap_modes,
3422                      3);
3423
3424    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
3425    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3426                      available_hot_pixel_map_modes,
3427                      1);
3428
3429
3430    int32_t avail_min_frame_durations_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt *
3431                                                 sizeof(scalar_formats)/sizeof(int32_t) * 4;
3432    int64_t avail_min_frame_durations[avail_min_frame_durations_size];
3433    int pos = 0;
3434    for (int j = 0; j < scalar_formats_count; j++) {
3435        for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3436           avail_min_frame_durations[pos]   = scalar_formats[j];
3437           avail_min_frame_durations[pos+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3438           avail_min_frame_durations[pos+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3439           avail_min_frame_durations[pos+3] = gCamCapability[cameraId]->picture_min_duration[i];
3440           pos+=4;
3441        }
3442    }
3443    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
3444                      avail_min_frame_durations,
3445                      avail_min_frame_durations_size);
3446
3447    int32_t available_request_keys[] = {ANDROID_COLOR_CORRECTION_MODE,
3448       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
3449       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3450       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
3451       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3452       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
3453       ANDROID_CONTROL_AF_REGIONS, ANDROID_CONTROL_AF_TRIGGER,
3454       ANDROID_CONTROL_AWB_LOCK, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3455       ANDROID_CONTROL_CAPTURE_INTENT, ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
3456       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
3457       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
3458       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3459       ANDROID_JPEG_GPS_COORDINATES,
3460       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
3461       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
3462       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
3463       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3464       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
3465       ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
3466       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
3467       ANDROID_SENSOR_FRAME_DURATION,
3468       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
3469       ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
3470       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3471       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
3472       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3473       ANDROID_BLACK_LEVEL_LOCK };
3474    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
3475                      available_request_keys,
3476                      sizeof(available_request_keys)/sizeof(int32_t));
3477
3478    int32_t available_result_keys[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
3479       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
3480       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AF_REGIONS,
3481       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3482       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
3483       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3484       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
3485       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
3486       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
3487       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3488       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
3489       ANDROID_NOISE_REDUCTION_MODE, ANDROID_QUIRKS_PARTIAL_RESULT, ANDROID_REQUEST_ID,
3490       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
3491       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_FORWARD_MATRIX,
3492       ANDROID_SENSOR_COLOR_TRANSFORM, ANDROID_SENSOR_CALIBRATION_TRANSFORM,
3493       ANDROID_SENSOR_SENSITIVITY, ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
3494       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
3495       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3496       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
3497       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3498       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
3499       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
3500       ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
3501       ANDROID_STATISTICS_FACE_SCORES};
3502    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3503                      available_result_keys,
3504                      sizeof(available_result_keys)/sizeof(int32_t));
3505
3506    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3507       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3508       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
3509       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
3510       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3511       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3512       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
3513       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
3514       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3515       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
3516       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
3517       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
3518       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
3519       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
3520       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3521       ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
3522       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3523       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3524       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3525       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3526       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3527       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3528       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3529       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
3530       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
3531       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
3532       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
3533       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
3534       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
3535       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3536       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3537       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3538       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
3539       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
3540       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
3541       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3542       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3543       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3544       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3545       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3546       ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
3547    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
3548                      available_characteristics_keys,
3549                      sizeof(available_characteristics_keys)/sizeof(int32_t));
3550
3551    gStaticMetadata[cameraId] = staticInfo.release();
3552    return rc;
3553}
3554
3555/*===========================================================================
3556 * FUNCTION   : makeTable
3557 *
3558 * DESCRIPTION: make a table of sizes
3559 *
3560 * PARAMETERS :
3561 *
3562 *
3563 *==========================================================================*/
3564void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
3565                                          int32_t* sizeTable)
3566{
3567    int j = 0;
3568    for (int i = 0; i < size; i++) {
3569        sizeTable[j] = dimTable[i].width;
3570        sizeTable[j+1] = dimTable[i].height;
3571        j+=2;
3572    }
3573}
3574
3575/*===========================================================================
3576 * FUNCTION   : makeFPSTable
3577 *
3578 * DESCRIPTION: make a table of fps ranges
3579 *
3580 * PARAMETERS :
3581 *
3582 *==========================================================================*/
3583void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
3584                                          int32_t* fpsRangesTable)
3585{
3586    int j = 0;
3587    for (int i = 0; i < size; i++) {
3588        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
3589        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
3590        j+=2;
3591    }
3592}
3593
3594/*===========================================================================
3595 * FUNCTION   : makeOverridesList
3596 *
3597 * DESCRIPTION: make a list of scene mode overrides
3598 *
3599 * PARAMETERS :
3600 *
3601 *
3602 *==========================================================================*/
3603void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
3604                                                  uint8_t size, uint8_t* overridesList,
3605                                                  uint8_t* supported_indexes,
3606                                                  int camera_id)
3607{
3608    /*daemon will give a list of overrides for all scene modes.
3609      However we should send the fwk only the overrides for the scene modes
3610      supported by the framework*/
3611    int j = 0, index = 0, supt = 0;
3612    uint8_t focus_override;
3613    for (int i = 0; i < size; i++) {
3614        supt = 0;
3615        index = supported_indexes[i];
3616        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
3617        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
3618                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3619                                                    overridesTable[index].awb_mode);
3620        focus_override = (uint8_t)overridesTable[index].af_mode;
3621        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
3622           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
3623              supt = 1;
3624              break;
3625           }
3626        }
3627        if (supt) {
3628           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
3629                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3630                                              focus_override);
3631        } else {
3632           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
3633        }
3634        j+=3;
3635    }
3636}
3637
3638/*===========================================================================
3639 * FUNCTION   : getPreviewHalPixelFormat
3640 *
3641 * DESCRIPTION: convert the format to type recognized by framework
3642 *
3643 * PARAMETERS : format : the format from backend
3644 *
3645 ** RETURN    : format recognized by framework
3646 *
3647 *==========================================================================*/
3648int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
3649{
3650    int32_t halPixelFormat;
3651
3652    switch (format) {
3653    case CAM_FORMAT_YUV_420_NV12:
3654        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
3655        break;
3656    case CAM_FORMAT_YUV_420_NV21:
3657        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3658        break;
3659    case CAM_FORMAT_YUV_420_NV21_ADRENO:
3660        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
3661        break;
3662    case CAM_FORMAT_YUV_420_YV12:
3663        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
3664        break;
3665    case CAM_FORMAT_YUV_422_NV16:
3666    case CAM_FORMAT_YUV_422_NV61:
3667    default:
3668        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3669        break;
3670    }
3671    return halPixelFormat;
3672}
3673
3674/*===========================================================================
3675 * FUNCTION   : getSensorSensitivity
3676 *
3677 * DESCRIPTION: convert iso_mode to an integer value
3678 *
3679 * PARAMETERS : iso_mode : the iso_mode supported by sensor
3680 *
3681 ** RETURN    : sensitivity supported by sensor
3682 *
3683 *==========================================================================*/
3684int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
3685{
3686    int32_t sensitivity;
3687
3688    switch (iso_mode) {
3689    case CAM_ISO_MODE_100:
3690        sensitivity = 100;
3691        break;
3692    case CAM_ISO_MODE_200:
3693        sensitivity = 200;
3694        break;
3695    case CAM_ISO_MODE_400:
3696        sensitivity = 400;
3697        break;
3698    case CAM_ISO_MODE_800:
3699        sensitivity = 800;
3700        break;
3701    case CAM_ISO_MODE_1600:
3702        sensitivity = 1600;
3703        break;
3704    default:
3705        sensitivity = -1;
3706        break;
3707    }
3708    return sensitivity;
3709}
3710
3711
3712/*===========================================================================
3713 * FUNCTION   : AddSetParmEntryToBatch
3714 *
3715 * DESCRIPTION: add set parameter entry into batch
3716 *
3717 * PARAMETERS :
3718 *   @p_table     : ptr to parameter buffer
3719 *   @paramType   : parameter type
3720 *   @paramLength : length of parameter value
3721 *   @paramValue  : ptr to parameter value
3722 *
3723 * RETURN     : int32_t type of status
3724 *              NO_ERROR  -- success
3725 *              none-zero failure code
3726 *==========================================================================*/
3727int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
3728                                                          cam_intf_parm_type_t paramType,
3729                                                          uint32_t paramLength,
3730                                                          void *paramValue)
3731{
3732    int position = paramType;
3733    int current, next;
3734
3735    /*************************************************************************
3736    *                 Code to take care of linking next flags                *
3737    *************************************************************************/
3738    current = GET_FIRST_PARAM_ID(p_table);
3739    if (position == current){
3740        //DO NOTHING
3741    } else if (position < current){
3742        SET_NEXT_PARAM_ID(position, p_table, current);
3743        SET_FIRST_PARAM_ID(p_table, position);
3744    } else {
3745        /* Search for the position in the linked list where we need to slot in*/
3746        while (position > GET_NEXT_PARAM_ID(current, p_table))
3747            current = GET_NEXT_PARAM_ID(current, p_table);
3748
3749        /*If node already exists no need to alter linking*/
3750        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
3751            next = GET_NEXT_PARAM_ID(current, p_table);
3752            SET_NEXT_PARAM_ID(current, p_table, position);
3753            SET_NEXT_PARAM_ID(position, p_table, next);
3754        }
3755    }
3756
3757    /*************************************************************************
3758    *                   Copy contents into entry                             *
3759    *************************************************************************/
3760
3761    if (paramLength > sizeof(parm_type_t)) {
3762        ALOGE("%s:Size of input larger than max entry size",__func__);
3763        return BAD_VALUE;
3764    }
3765    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
3766    return NO_ERROR;
3767}
3768
3769/*===========================================================================
3770 * FUNCTION   : lookupFwkName
3771 *
3772 * DESCRIPTION: In case the enum is not same in fwk and backend
3773 *              make sure the parameter is correctly propogated
3774 *
3775 * PARAMETERS  :
3776 *   @arr      : map between the two enums
3777 *   @len      : len of the map
3778 *   @hal_name : name of the hal_parm to map
3779 *
3780 * RETURN     : int type of status
3781 *              fwk_name  -- success
3782 *              none-zero failure code
3783 *==========================================================================*/
3784int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
3785                                             int len, int hal_name)
3786{
3787
3788    for (int i = 0; i < len; i++) {
3789        if (arr[i].hal_name == hal_name)
3790            return arr[i].fwk_name;
3791    }
3792
3793    /* Not able to find matching framework type is not necessarily
3794     * an error case. This happens when mm-camera supports more attributes
3795     * than the frameworks do */
3796    ALOGD("%s: Cannot find matching framework type", __func__);
3797    return NAME_NOT_FOUND;
3798}
3799
3800/*===========================================================================
3801 * FUNCTION   : lookupHalName
3802 *
3803 * DESCRIPTION: In case the enum is not same in fwk and backend
3804 *              make sure the parameter is correctly propogated
3805 *
3806 * PARAMETERS  :
3807 *   @arr      : map between the two enums
3808 *   @len      : len of the map
3809 *   @fwk_name : name of the hal_parm to map
3810 *
3811 * RETURN     : int32_t type of status
3812 *              hal_name  -- success
3813 *              none-zero failure code
3814 *==========================================================================*/
3815int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
3816                                             int len, unsigned int fwk_name)
3817{
3818    for (int i = 0; i < len; i++) {
3819       if (arr[i].fwk_name == fwk_name)
3820           return arr[i].hal_name;
3821    }
3822    ALOGE("%s: Cannot find matching hal type", __func__);
3823    return NAME_NOT_FOUND;
3824}
3825
3826/*===========================================================================
3827 * FUNCTION   : getCapabilities
3828 *
3829 * DESCRIPTION: query camera capabilities
3830 *
3831 * PARAMETERS :
3832 *   @cameraId  : camera Id
3833 *   @info      : camera info struct to be filled in with camera capabilities
3834 *
3835 * RETURN     : int32_t type of status
3836 *              NO_ERROR  -- success
3837 *              none-zero failure code
3838 *==========================================================================*/
3839int QCamera3HardwareInterface::getCamInfo(int cameraId,
3840                                    struct camera_info *info)
3841{
3842    int rc = 0;
3843
3844    if (NULL == gCamCapability[cameraId]) {
3845        rc = initCapabilities(cameraId);
3846        if (rc < 0) {
3847            //pthread_mutex_unlock(&g_camlock);
3848            return rc;
3849        }
3850    }
3851
3852    if (NULL == gStaticMetadata[cameraId]) {
3853        rc = initStaticMetadata(cameraId);
3854        if (rc < 0) {
3855            return rc;
3856        }
3857    }
3858
3859    switch(gCamCapability[cameraId]->position) {
3860    case CAM_POSITION_BACK:
3861        info->facing = CAMERA_FACING_BACK;
3862        break;
3863
3864    case CAM_POSITION_FRONT:
3865        info->facing = CAMERA_FACING_FRONT;
3866        break;
3867
3868    default:
3869        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
3870        rc = -1;
3871        break;
3872    }
3873
3874
3875    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
3876    info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
3877    info->static_camera_characteristics = gStaticMetadata[cameraId];
3878
3879    return rc;
3880}
3881
3882/*===========================================================================
3883 * FUNCTION   : translateMetadata
3884 *
3885 * DESCRIPTION: translate the metadata into camera_metadata_t
3886 *
3887 * PARAMETERS : type of the request
3888 *
3889 *
3890 * RETURN     : success: camera_metadata_t*
3891 *              failure: NULL
3892 *
3893 *==========================================================================*/
3894camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
3895{
3896    pthread_mutex_lock(&mMutex);
3897
3898    if (mDefaultMetadata[type] != NULL) {
3899        pthread_mutex_unlock(&mMutex);
3900        return mDefaultMetadata[type];
3901    }
3902    //first time we are handling this request
3903    //fill up the metadata structure using the wrapper class
3904    CameraMetadata settings;
3905    //translate from cam_capability_t to camera_metadata_tag_t
3906    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
3907    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
3908    int32_t defaultRequestID = 0;
3909    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
3910
3911    uint8_t controlIntent = 0;
3912    uint8_t focusMode;
3913    switch (type) {
3914      case CAMERA3_TEMPLATE_PREVIEW:
3915        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
3916        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3917        break;
3918      case CAMERA3_TEMPLATE_STILL_CAPTURE:
3919        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
3920        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3921        break;
3922      case CAMERA3_TEMPLATE_VIDEO_RECORD:
3923        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
3924        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
3925        break;
3926      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
3927        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
3928        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
3929        break;
3930      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
3931        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
3932        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3933        break;
3934      default:
3935        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
3936        break;
3937    }
3938    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
3939
3940    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
3941        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3942    }
3943    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
3944
3945    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3946            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
3947
3948    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
3949    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3950
3951    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
3952    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
3953
3954    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
3955    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
3956
3957    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
3958    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
3959
3960    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
3961    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
3962
3963    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
3964    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
3965
3966    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
3967    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
3968
3969    /*flash*/
3970    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3971    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3972
3973    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3974    settings.update(ANDROID_FLASH_FIRING_POWER,
3975            &flashFiringLevel, 1);
3976
3977    /* lens */
3978    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3979    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3980
3981    if (gCamCapability[mCameraId]->filter_densities_count) {
3982        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3983        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3984                        gCamCapability[mCameraId]->filter_densities_count);
3985    }
3986
3987    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3988    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3989
3990    /* Exposure time(Update the Min Exposure Time)*/
3991    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
3992    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
3993
3994    /* frame duration */
3995    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
3996    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
3997
3998    /* sensitivity */
3999    static const int32_t default_sensitivity = 100;
4000    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
4001
4002    /*edge mode*/
4003    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
4004    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
4005
4006    /*noise reduction mode*/
4007    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
4008    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
4009
4010    /*color correction mode*/
4011    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
4012    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
4013
4014    /*transform matrix mode*/
4015    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
4016    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
4017
4018    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
4019    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
4020
4021    int32_t scaler_crop_region[4];
4022    scaler_crop_region[0] = 0;
4023    scaler_crop_region[1] = 0;
4024    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
4025    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
4026    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
4027
4028    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
4029    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
4030
4031    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4032    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
4033
4034    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
4035                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
4036                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
4037    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
4038
4039    /*focus distance*/
4040    float focus_distance = 0.0;
4041    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
4042
4043    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
4044    float max_range = 0.0;
4045    float max_fixed_fps = 0.0;
4046    int32_t fps_range[2] = {0, 0};
4047    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
4048            i++) {
4049        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
4050            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4051        if (type == CAMERA3_TEMPLATE_PREVIEW ||
4052                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
4053                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
4054            if (range > max_range) {
4055                fps_range[0] =
4056                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4057                fps_range[1] =
4058                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4059                max_range = range;
4060            }
4061        } else {
4062            if (range < 0.01 && max_fixed_fps <
4063                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
4064                fps_range[0] =
4065                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4066                fps_range[1] =
4067                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4068                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4069            }
4070        }
4071    }
4072    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
4073
4074    /*precapture trigger*/
4075    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
4076    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
4077
4078    /*af trigger*/
4079    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
4080    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
4081
4082    /* ae & af regions */
4083    int32_t active_region[] = {
4084            gCamCapability[mCameraId]->active_array_size.left,
4085            gCamCapability[mCameraId]->active_array_size.top,
4086            gCamCapability[mCameraId]->active_array_size.left +
4087                    gCamCapability[mCameraId]->active_array_size.width,
4088            gCamCapability[mCameraId]->active_array_size.top +
4089                    gCamCapability[mCameraId]->active_array_size.height,
4090            1};
4091    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
4092    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
4093
4094    /* black level lock */
4095    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4096    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
4097
4098    /* face detect mode */
4099    uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
4100    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
4101
4102    /* lens shading map mode */
4103    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
4104    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
4105
4106    mDefaultMetadata[type] = settings.release();
4107
4108    pthread_mutex_unlock(&mMutex);
4109    return mDefaultMetadata[type];
4110}
4111
4112/*===========================================================================
4113 * FUNCTION   : setFrameParameters
4114 *
4115 * DESCRIPTION: set parameters per frame as requested in the metadata from
4116 *              framework
4117 *
4118 * PARAMETERS :
4119 *   @request   : request that needs to be serviced
4120 *   @streamID : Stream ID of all the requested streams
4121 *
4122 * RETURN     : success: NO_ERROR
4123 *              failure:
4124 *==========================================================================*/
4125int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
4126                    cam_stream_ID_t streamID)
4127{
4128    /*translate from camera_metadata_t type to parm_type_t*/
4129    int rc = 0;
4130    if (request->settings == NULL && mFirstRequest) {
4131        /*settings cannot be null for the first request*/
4132        return BAD_VALUE;
4133    }
4134
4135    int32_t hal_version = CAM_HAL_V3;
4136
4137    memset(mParameters, 0, sizeof(parm_buffer_t));
4138    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
4139    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
4140                sizeof(hal_version), &hal_version);
4141    if (rc < 0) {
4142        ALOGE("%s: Failed to set hal version in the parameters", __func__);
4143        return BAD_VALUE;
4144    }
4145
4146    /*we need to update the frame number in the parameters*/
4147    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
4148                                sizeof(request->frame_number), &(request->frame_number));
4149    if (rc < 0) {
4150        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4151        return BAD_VALUE;
4152    }
4153
4154    /* Update stream id of all the requested buffers */
4155    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
4156                                sizeof(cam_stream_ID_t), &streamID);
4157
4158    if (rc < 0) {
4159        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
4160        return BAD_VALUE;
4161    }
4162
4163    if(request->settings != NULL){
4164        rc = translateMetadataToParameters(request);
4165    }
4166
4167    /*set the parameters to backend*/
4168    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
4169    return rc;
4170}
4171
4172/*===========================================================================
4173 * FUNCTION   : translateMetadataToParameters
4174 *
4175 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
4176 *
4177 *
4178 * PARAMETERS :
4179 *   @request  : request sent from framework
4180 *
4181 *
4182 * RETURN     : success: NO_ERROR
4183 *              failure:
4184 *==========================================================================*/
4185int QCamera3HardwareInterface::translateMetadataToParameters
4186                                  (const camera3_capture_request_t *request)
4187{
4188    int rc = 0;
4189    CameraMetadata frame_settings;
4190    frame_settings = request->settings;
4191
4192    /* Do not change the order of the following list unless you know what you are
4193     * doing.
4194     * The order is laid out in such a way that parameters in the front of the table
4195     * may be used to override the parameters later in the table. Examples are:
4196     * 1. META_MODE should precede AEC/AWB/AF MODE
4197     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
4198     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
4199     * 4. Any mode should precede it's corresponding settings
4200     */
4201    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4202        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4203        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
4204                sizeof(metaMode), &metaMode);
4205        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4206           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4207           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
4208                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
4209                                             fwk_sceneMode);
4210           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
4211                sizeof(sceneMode), &sceneMode);
4212        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
4213           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4214           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
4215                sizeof(sceneMode), &sceneMode);
4216        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
4217           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4218           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
4219                sizeof(sceneMode), &sceneMode);
4220        }
4221    }
4222
4223    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4224        uint8_t fwk_aeMode =
4225            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4226        uint8_t aeMode;
4227        int32_t redeye;
4228
4229        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
4230            aeMode = CAM_AE_MODE_OFF;
4231        } else {
4232            aeMode = CAM_AE_MODE_ON;
4233        }
4234        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
4235            redeye = 1;
4236        } else {
4237            redeye = 0;
4238        }
4239
4240        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
4241                                          sizeof(AE_FLASH_MODE_MAP),
4242                                          fwk_aeMode);
4243        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
4244                sizeof(aeMode), &aeMode);
4245        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
4246                sizeof(flashMode), &flashMode);
4247        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
4248                sizeof(redeye), &redeye);
4249    }
4250
4251    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
4252        uint8_t fwk_whiteLevel =
4253            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
4254        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
4255                sizeof(WHITE_BALANCE_MODES_MAP),
4256                fwk_whiteLevel);
4257        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
4258                sizeof(whiteLevel), &whiteLevel);
4259    }
4260
4261    float focalDistance = -1.0;
4262    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
4263        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
4264        rc = AddSetParmEntryToBatch(mParameters,
4265                CAM_INTF_META_LENS_FOCUS_DISTANCE,
4266                sizeof(focalDistance), &focalDistance);
4267    }
4268
4269    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
4270        uint8_t fwk_focusMode =
4271            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
4272        uint8_t focusMode;
4273        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
4274            focusMode = CAM_FOCUS_MODE_INFINITY;
4275        } else{
4276         focusMode = lookupHalName(FOCUS_MODES_MAP,
4277                                   sizeof(FOCUS_MODES_MAP),
4278                                   fwk_focusMode);
4279        }
4280        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
4281                sizeof(focusMode), &focusMode);
4282    }
4283
4284    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
4285        int32_t antibandingMode =
4286            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
4287        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
4288                sizeof(antibandingMode), &antibandingMode);
4289    }
4290
4291    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4292        int32_t expCompensation = frame_settings.find(
4293            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4294        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4295            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4296        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4297            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4298        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
4299          sizeof(expCompensation), &expCompensation);
4300    }
4301
4302    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
4303        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
4304        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
4305                sizeof(aeLock), &aeLock);
4306    }
4307    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4308        cam_fps_range_t fps_range;
4309        fps_range.min_fps =
4310            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
4311        fps_range.max_fps =
4312            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
4313        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
4314                sizeof(fps_range), &fps_range);
4315    }
4316
4317    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
4318        uint8_t awbLock =
4319            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
4320        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
4321                sizeof(awbLock), &awbLock);
4322    }
4323
4324    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
4325        uint8_t fwk_effectMode =
4326            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
4327        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
4328                sizeof(EFFECT_MODES_MAP),
4329                fwk_effectMode);
4330        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
4331                sizeof(effectMode), &effectMode);
4332    }
4333
4334    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
4335        uint8_t colorCorrectMode =
4336            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
4337        rc =
4338            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
4339                    sizeof(colorCorrectMode), &colorCorrectMode);
4340    }
4341
4342    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
4343        cam_color_correct_gains_t colorCorrectGains;
4344        for (int i = 0; i < 4; i++) {
4345            colorCorrectGains.gains[i] =
4346                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
4347        }
4348        rc =
4349            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
4350                    sizeof(colorCorrectGains), &colorCorrectGains);
4351    }
4352
4353    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
4354        cam_color_correct_matrix_t colorCorrectTransform;
4355        cam_rational_type_t transform_elem;
4356        int num = 0;
4357        for (int i = 0; i < 3; i++) {
4358           for (int j = 0; j < 3; j++) {
4359              transform_elem.numerator =
4360                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
4361              transform_elem.denominator =
4362                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
4363              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
4364              num++;
4365           }
4366        }
4367        rc =
4368            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
4369                    sizeof(colorCorrectTransform), &colorCorrectTransform);
4370    }
4371
4372    cam_trigger_t aecTrigger;
4373    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
4374    aecTrigger.trigger_id = -1;
4375    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
4376        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
4377        aecTrigger.trigger =
4378            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
4379        aecTrigger.trigger_id =
4380            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
4381    }
4382    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
4383                                sizeof(aecTrigger), &aecTrigger);
4384
4385    /*af_trigger must come with a trigger id*/
4386    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
4387        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
4388        cam_trigger_t af_trigger;
4389        af_trigger.trigger =
4390            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
4391        af_trigger.trigger_id =
4392            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
4393        rc = AddSetParmEntryToBatch(mParameters,
4394                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
4395    }
4396
4397    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
4398        int32_t demosaic =
4399            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
4400        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
4401                sizeof(demosaic), &demosaic);
4402    }
4403
4404    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
4405        cam_edge_application_t edge_application;
4406        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
4407        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
4408            edge_application.sharpness = 0;
4409        } else {
4410            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
4411                uint8_t edgeStrength =
4412                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
4413                edge_application.sharpness = (int32_t)edgeStrength;
4414            } else {
4415                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
4416            }
4417        }
4418        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
4419                sizeof(edge_application), &edge_application);
4420    }
4421
4422    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
4423        int32_t respectFlashMode = 1;
4424        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4425            uint8_t fwk_aeMode =
4426                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4427            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
4428                respectFlashMode = 0;
4429                ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
4430                    __func__);
4431            }
4432        }
4433        if (respectFlashMode) {
4434            uint8_t flashMode =
4435                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
4436            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
4437                                          sizeof(FLASH_MODES_MAP),
4438                                          flashMode);
4439            ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
4440            // To check: CAM_INTF_META_FLASH_MODE usage
4441            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
4442                          sizeof(flashMode), &flashMode);
4443        }
4444    }
4445
4446    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
4447        uint8_t flashPower =
4448            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
4449        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
4450                sizeof(flashPower), &flashPower);
4451    }
4452
4453    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
4454        int64_t flashFiringTime =
4455            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
4456        rc = AddSetParmEntryToBatch(mParameters,
4457                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
4458    }
4459
4460    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
4461        uint8_t hotPixelMode =
4462            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
4463        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
4464                sizeof(hotPixelMode), &hotPixelMode);
4465    }
4466
4467    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
4468        float lensAperture =
4469            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
4470        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
4471                sizeof(lensAperture), &lensAperture);
4472    }
4473
4474    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
4475        float filterDensity =
4476            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
4477        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
4478                sizeof(filterDensity), &filterDensity);
4479    }
4480
4481    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
4482        float focalLength =
4483            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
4484        rc = AddSetParmEntryToBatch(mParameters,
4485                CAM_INTF_META_LENS_FOCAL_LENGTH,
4486                sizeof(focalLength), &focalLength);
4487    }
4488
4489    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
4490        uint8_t optStabMode =
4491            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
4492        rc = AddSetParmEntryToBatch(mParameters,
4493                CAM_INTF_META_LENS_OPT_STAB_MODE,
4494                sizeof(optStabMode), &optStabMode);
4495    }
4496
4497    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4498        uint8_t noiseRedMode =
4499            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4500        rc = AddSetParmEntryToBatch(mParameters,
4501                CAM_INTF_META_NOISE_REDUCTION_MODE,
4502                sizeof(noiseRedMode), &noiseRedMode);
4503    }
4504
4505    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
4506        uint8_t noiseRedStrength =
4507            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
4508        rc = AddSetParmEntryToBatch(mParameters,
4509                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
4510                sizeof(noiseRedStrength), &noiseRedStrength);
4511    }
4512
4513    cam_crop_region_t scalerCropRegion;
4514    bool scalerCropSet = false;
4515    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
4516        scalerCropRegion.left =
4517            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
4518        scalerCropRegion.top =
4519            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
4520        scalerCropRegion.width =
4521            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
4522        scalerCropRegion.height =
4523            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
4524        rc = AddSetParmEntryToBatch(mParameters,
4525                CAM_INTF_META_SCALER_CROP_REGION,
4526                sizeof(scalerCropRegion), &scalerCropRegion);
4527        scalerCropSet = true;
4528    }
4529
4530    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
4531        int64_t sensorExpTime =
4532            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
4533        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
4534        rc = AddSetParmEntryToBatch(mParameters,
4535                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
4536                sizeof(sensorExpTime), &sensorExpTime);
4537    }
4538
4539    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
4540        int64_t sensorFrameDuration =
4541            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
4542        int64_t minFrameDuration = getMinFrameDuration(request);
4543        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
4544        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
4545            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
4546        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
4547        rc = AddSetParmEntryToBatch(mParameters,
4548                CAM_INTF_META_SENSOR_FRAME_DURATION,
4549                sizeof(sensorFrameDuration), &sensorFrameDuration);
4550    }
4551
4552    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
4553        int32_t sensorSensitivity =
4554            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
4555        if (sensorSensitivity <
4556                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
4557            sensorSensitivity =
4558                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
4559        if (sensorSensitivity >
4560                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
4561            sensorSensitivity =
4562                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
4563        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
4564        rc = AddSetParmEntryToBatch(mParameters,
4565                CAM_INTF_META_SENSOR_SENSITIVITY,
4566                sizeof(sensorSensitivity), &sensorSensitivity);
4567    }
4568
4569    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
4570        int32_t shadingMode =
4571            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
4572        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
4573                sizeof(shadingMode), &shadingMode);
4574    }
4575
4576    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
4577        uint8_t shadingStrength =
4578            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
4579        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
4580                sizeof(shadingStrength), &shadingStrength);
4581    }
4582
4583    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
4584        uint8_t fwk_facedetectMode =
4585            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
4586        uint8_t facedetectMode =
4587            lookupHalName(FACEDETECT_MODES_MAP,
4588                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
4589        rc = AddSetParmEntryToBatch(mParameters,
4590                CAM_INTF_META_STATS_FACEDETECT_MODE,
4591                sizeof(facedetectMode), &facedetectMode);
4592    }
4593
4594    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
4595        uint8_t histogramMode =
4596            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
4597        rc = AddSetParmEntryToBatch(mParameters,
4598                CAM_INTF_META_STATS_HISTOGRAM_MODE,
4599                sizeof(histogramMode), &histogramMode);
4600    }
4601
4602    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
4603        uint8_t sharpnessMapMode =
4604            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
4605        rc = AddSetParmEntryToBatch(mParameters,
4606                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
4607                sizeof(sharpnessMapMode), &sharpnessMapMode);
4608    }
4609
4610    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
4611        uint8_t tonemapMode =
4612            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
4613        rc = AddSetParmEntryToBatch(mParameters,
4614                CAM_INTF_META_TONEMAP_MODE,
4615                sizeof(tonemapMode), &tonemapMode);
4616    }
4617    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
4618    /*All tonemap channels will have the same number of points*/
4619    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
4620        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
4621        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
4622        cam_rgb_tonemap_curves tonemapCurves;
4623        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
4624
4625        /* ch0 = G*/
4626        int point = 0;
4627        cam_tonemap_curve_t tonemapCurveGreen;
4628        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
4629            for (int j = 0; j < 2; j++) {
4630               tonemapCurveGreen.tonemap_points[i][j] =
4631                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
4632               point++;
4633            }
4634        }
4635        tonemapCurves.curves[0] = tonemapCurveGreen;
4636
4637        /* ch 1 = B */
4638        point = 0;
4639        cam_tonemap_curve_t tonemapCurveBlue;
4640        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
4641            for (int j = 0; j < 2; j++) {
4642               tonemapCurveBlue.tonemap_points[i][j] =
4643                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
4644               point++;
4645            }
4646        }
4647        tonemapCurves.curves[1] = tonemapCurveBlue;
4648
4649        /* ch 2 = R */
4650        point = 0;
4651        cam_tonemap_curve_t tonemapCurveRed;
4652        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
4653            for (int j = 0; j < 2; j++) {
4654               tonemapCurveRed.tonemap_points[i][j] =
4655                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
4656               point++;
4657            }
4658        }
4659        tonemapCurves.curves[2] = tonemapCurveRed;
4660
4661        rc = AddSetParmEntryToBatch(mParameters,
4662                CAM_INTF_META_TONEMAP_CURVES,
4663                sizeof(tonemapCurves), &tonemapCurves);
4664    }
4665
4666    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4667        uint8_t captureIntent =
4668            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4669        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
4670                sizeof(captureIntent), &captureIntent);
4671    }
4672
4673    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
4674        uint8_t blackLevelLock =
4675            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
4676        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
4677                sizeof(blackLevelLock), &blackLevelLock);
4678    }
4679
4680    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
4681        uint8_t lensShadingMapMode =
4682            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
4683        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
4684                sizeof(lensShadingMapMode), &lensShadingMapMode);
4685    }
4686
4687    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
4688        cam_area_t roi;
4689        bool reset = true;
4690        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
4691        if (scalerCropSet) {
4692            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4693        }
4694        if (reset) {
4695            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
4696                    sizeof(roi), &roi);
4697        }
4698    }
4699
4700    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
4701        cam_area_t roi;
4702        bool reset = true;
4703        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
4704        if (scalerCropSet) {
4705            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4706        }
4707        if (reset) {
4708            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
4709                    sizeof(roi), &roi);
4710        }
4711    }
4712
4713    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
4714        cam_area_t roi;
4715        bool reset = true;
4716        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
4717        if (scalerCropSet) {
4718            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4719        }
4720        if (reset) {
4721            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
4722                    sizeof(roi), &roi);
4723        }
4724    }
4725
4726    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
4727        cam_test_pattern_data_t testPatternData;
4728        uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
4729        uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
4730               sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
4731
4732        memset(&testPatternData, 0, sizeof(testPatternData));
4733        testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
4734        if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
4735                frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
4736            int32_t* fwk_testPatternData = frame_settings.find(
4737                    ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
4738            testPatternData.r = fwk_testPatternData[0];
4739            testPatternData.b = fwk_testPatternData[3];
4740            switch (gCamCapability[mCameraId]->color_arrangement) {
4741            case CAM_FILTER_ARRANGEMENT_RGGB:
4742            case CAM_FILTER_ARRANGEMENT_GRBG:
4743                testPatternData.gr = fwk_testPatternData[1];
4744                testPatternData.gb = fwk_testPatternData[2];
4745                break;
4746            case CAM_FILTER_ARRANGEMENT_GBRG:
4747            case CAM_FILTER_ARRANGEMENT_BGGR:
4748                testPatternData.gr = fwk_testPatternData[2];
4749                testPatternData.gb = fwk_testPatternData[1];
4750                break;
4751            default:
4752                ALOGE("%s: color arrangement %d is not supported", __func__,
4753                    gCamCapability[mCameraId]->color_arrangement);
4754                break;
4755            }
4756        }
4757        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_TEST_PATTERN_DATA,
4758            sizeof(testPatternData), &testPatternData);
4759    }
4760    return rc;
4761}
4762
4763/*===========================================================================
4764 * FUNCTION   : getJpegSettings
4765 *
4766 * DESCRIPTION: save the jpeg settings in the HAL
4767 *
4768 *
4769 * PARAMETERS :
4770 *   @settings  : frame settings information from framework
4771 *
4772 *
4773 * RETURN     : success: NO_ERROR
4774 *              failure:
4775 *==========================================================================*/
4776int QCamera3HardwareInterface::getJpegSettings
4777                                  (const camera_metadata_t *settings)
4778{
4779    if (mJpegSettings) {
4780        if (mJpegSettings->gps_timestamp) {
4781            free(mJpegSettings->gps_timestamp);
4782            mJpegSettings->gps_timestamp = NULL;
4783        }
4784        if (mJpegSettings->gps_coordinates) {
4785            for (int i = 0; i < 3; i++) {
4786                free(mJpegSettings->gps_coordinates[i]);
4787                mJpegSettings->gps_coordinates[i] = NULL;
4788            }
4789        }
4790        free(mJpegSettings);
4791        mJpegSettings = NULL;
4792    }
4793    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
4794    CameraMetadata jpeg_settings;
4795    jpeg_settings = settings;
4796
4797    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
4798        mJpegSettings->jpeg_orientation =
4799            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
4800    } else {
4801        mJpegSettings->jpeg_orientation = 0;
4802    }
4803    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
4804        mJpegSettings->jpeg_quality =
4805            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
4806    } else {
4807        mJpegSettings->jpeg_quality = 85;
4808    }
4809    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
4810        mJpegSettings->thumbnail_size.width =
4811            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
4812        mJpegSettings->thumbnail_size.height =
4813            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
4814    } else {
4815        mJpegSettings->thumbnail_size.width = 0;
4816        mJpegSettings->thumbnail_size.height = 0;
4817    }
4818    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
4819        for (int i = 0; i < 3; i++) {
4820            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
4821            *(mJpegSettings->gps_coordinates[i]) =
4822                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
4823        }
4824    } else{
4825       for (int i = 0; i < 3; i++) {
4826            mJpegSettings->gps_coordinates[i] = NULL;
4827        }
4828    }
4829
4830    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
4831        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
4832        *(mJpegSettings->gps_timestamp) =
4833            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
4834    } else {
4835        mJpegSettings->gps_timestamp = NULL;
4836    }
4837
4838    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
4839        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
4840        for (int i = 0; i < len; i++) {
4841            mJpegSettings->gps_processing_method[i] =
4842                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
4843        }
4844        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
4845            mJpegSettings->gps_processing_method[len] = '\0';
4846        }
4847    } else {
4848        mJpegSettings->gps_processing_method[0] = '\0';
4849    }
4850
4851    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
4852        mJpegSettings->sensor_sensitivity =
4853            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
4854    } else {
4855        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
4856    }
4857
4858    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
4859
4860    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
4861        mJpegSettings->lens_focal_length =
4862            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
4863    }
4864    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4865        mJpegSettings->exposure_compensation =
4866            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4867    }
4868    mJpegSettings->sharpness = 10; //default value
4869    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
4870        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
4871        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
4872            mJpegSettings->sharpness = 0;
4873        }
4874    }
4875    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
4876    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
4877    mJpegSettings->is_jpeg_format = true;
4878    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
4879    return 0;
4880}
4881
4882/*===========================================================================
4883 * FUNCTION   : captureResultCb
4884 *
4885 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
4886 *
4887 * PARAMETERS :
4888 *   @frame  : frame information from mm-camera-interface
4889 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
4890 *   @userdata: userdata
4891 *
4892 * RETURN     : NONE
4893 *==========================================================================*/
4894void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
4895                camera3_stream_buffer_t *buffer,
4896                uint32_t frame_number, void *userdata)
4897{
4898    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
4899    if (hw == NULL) {
4900        ALOGE("%s: Invalid hw %p", __func__, hw);
4901        return;
4902    }
4903
4904    hw->captureResultCb(metadata, buffer, frame_number);
4905    return;
4906}
4907
4908
4909/*===========================================================================
4910 * FUNCTION   : initialize
4911 *
4912 * DESCRIPTION: Pass framework callback pointers to HAL
4913 *
4914 * PARAMETERS :
4915 *
4916 *
4917 * RETURN     : Success : 0
4918 *              Failure: -ENODEV
4919 *==========================================================================*/
4920
4921int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
4922                                  const camera3_callback_ops_t *callback_ops)
4923{
4924    ALOGV("%s: E", __func__);
4925    QCamera3HardwareInterface *hw =
4926        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4927    if (!hw) {
4928        ALOGE("%s: NULL camera device", __func__);
4929        return -ENODEV;
4930    }
4931
4932    int rc = hw->initialize(callback_ops);
4933    ALOGV("%s: X", __func__);
4934    return rc;
4935}
4936
4937/*===========================================================================
4938 * FUNCTION   : configure_streams
4939 *
4940 * DESCRIPTION:
4941 *
4942 * PARAMETERS :
4943 *
4944 *
4945 * RETURN     : Success: 0
4946 *              Failure: -EINVAL (if stream configuration is invalid)
4947 *                       -ENODEV (fatal error)
4948 *==========================================================================*/
4949
4950int QCamera3HardwareInterface::configure_streams(
4951        const struct camera3_device *device,
4952        camera3_stream_configuration_t *stream_list)
4953{
4954    ALOGV("%s: E", __func__);
4955    QCamera3HardwareInterface *hw =
4956        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4957    if (!hw) {
4958        ALOGE("%s: NULL camera device", __func__);
4959        return -ENODEV;
4960    }
4961    int rc = hw->configureStreams(stream_list);
4962    ALOGV("%s: X", __func__);
4963    return rc;
4964}
4965
4966/*===========================================================================
4967 * FUNCTION   : register_stream_buffers
4968 *
4969 * DESCRIPTION: Register stream buffers with the device
4970 *
4971 * PARAMETERS :
4972 *
4973 * RETURN     :
4974 *==========================================================================*/
4975int QCamera3HardwareInterface::register_stream_buffers(
4976        const struct camera3_device *device,
4977        const camera3_stream_buffer_set_t *buffer_set)
4978{
4979    ALOGV("%s: E", __func__);
4980    QCamera3HardwareInterface *hw =
4981        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4982    if (!hw) {
4983        ALOGE("%s: NULL camera device", __func__);
4984        return -ENODEV;
4985    }
4986    int rc = hw->registerStreamBuffers(buffer_set);
4987    ALOGV("%s: X", __func__);
4988    return rc;
4989}
4990
4991/*===========================================================================
4992 * FUNCTION   : construct_default_request_settings
4993 *
4994 * DESCRIPTION: Configure a settings buffer to meet the required use case
4995 *
4996 * PARAMETERS :
4997 *
4998 *
4999 * RETURN     : Success: Return valid metadata
5000 *              Failure: Return NULL
5001 *==========================================================================*/
5002const camera_metadata_t* QCamera3HardwareInterface::
5003    construct_default_request_settings(const struct camera3_device *device,
5004                                        int type)
5005{
5006
5007    ALOGV("%s: E", __func__);
5008    camera_metadata_t* fwk_metadata = NULL;
5009    QCamera3HardwareInterface *hw =
5010        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5011    if (!hw) {
5012        ALOGE("%s: NULL camera device", __func__);
5013        return NULL;
5014    }
5015
5016    fwk_metadata = hw->translateCapabilityToMetadata(type);
5017
5018    ALOGV("%s: X", __func__);
5019    return fwk_metadata;
5020}
5021
5022/*===========================================================================
5023 * FUNCTION   : process_capture_request
5024 *
5025 * DESCRIPTION:
5026 *
5027 * PARAMETERS :
5028 *
5029 *
5030 * RETURN     :
5031 *==========================================================================*/
5032int QCamera3HardwareInterface::process_capture_request(
5033                    const struct camera3_device *device,
5034                    camera3_capture_request_t *request)
5035{
5036    ALOGV("%s: E", __func__);
5037    QCamera3HardwareInterface *hw =
5038        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5039    if (!hw) {
5040        ALOGE("%s: NULL camera device", __func__);
5041        return -EINVAL;
5042    }
5043
5044    int rc = hw->processCaptureRequest(request);
5045    ALOGV("%s: X", __func__);
5046    return rc;
5047}
5048
5049/*===========================================================================
5050 * FUNCTION   : get_metadata_vendor_tag_ops
5051 *
5052 * DESCRIPTION:
5053 *
5054 * PARAMETERS :
5055 *
5056 *
5057 * RETURN     :
5058 *==========================================================================*/
5059
5060void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
5061                const struct camera3_device *device,
5062                vendor_tag_query_ops_t* ops)
5063{
5064    ALOGV("%s: E", __func__);
5065    QCamera3HardwareInterface *hw =
5066        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5067    if (!hw) {
5068        ALOGE("%s: NULL camera device", __func__);
5069        return;
5070    }
5071
5072    hw->getMetadataVendorTagOps(ops);
5073    ALOGV("%s: X", __func__);
5074    return;
5075}
5076
5077/*===========================================================================
5078 * FUNCTION   : dump
5079 *
5080 * DESCRIPTION:
5081 *
5082 * PARAMETERS :
5083 *
5084 *
5085 * RETURN     :
5086 *==========================================================================*/
5087
5088void QCamera3HardwareInterface::dump(
5089                const struct camera3_device *device, int fd)
5090{
5091    ALOGV("%s: E", __func__);
5092    QCamera3HardwareInterface *hw =
5093        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5094    if (!hw) {
5095        ALOGE("%s: NULL camera device", __func__);
5096        return;
5097    }
5098
5099    hw->dump(fd);
5100    ALOGV("%s: X", __func__);
5101    return;
5102}
5103
5104/*===========================================================================
5105 * FUNCTION   : flush
5106 *
5107 * DESCRIPTION:
5108 *
5109 * PARAMETERS :
5110 *
5111 *
5112 * RETURN     :
5113 *==========================================================================*/
5114
5115int QCamera3HardwareInterface::flush(
5116                const struct camera3_device *device)
5117{
5118    int rc;
5119    ALOGV("%s: E", __func__);
5120    QCamera3HardwareInterface *hw =
5121        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5122    if (!hw) {
5123        ALOGE("%s: NULL camera device", __func__);
5124        return -EINVAL;
5125    }
5126
5127    rc = hw->flush();
5128    ALOGV("%s: X", __func__);
5129    return rc;
5130}
5131
5132/*===========================================================================
5133 * FUNCTION   : close_camera_device
5134 *
5135 * DESCRIPTION:
5136 *
5137 * PARAMETERS :
5138 *
5139 *
5140 * RETURN     :
5141 *==========================================================================*/
5142int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
5143{
5144    ALOGV("%s: E", __func__);
5145    int ret = NO_ERROR;
5146    QCamera3HardwareInterface *hw =
5147        reinterpret_cast<QCamera3HardwareInterface *>(
5148            reinterpret_cast<camera3_device_t *>(device)->priv);
5149    if (!hw) {
5150        ALOGE("NULL camera device");
5151        return BAD_VALUE;
5152    }
5153    delete hw;
5154
5155    pthread_mutex_lock(&mCameraSessionLock);
5156    mCameraSessionActive = 0;
5157    pthread_mutex_unlock(&mCameraSessionLock);
5158    ALOGV("%s: X", __func__);
5159    return ret;
5160}
5161
5162/*===========================================================================
5163 * FUNCTION   : getWaveletDenoiseProcessPlate
5164 *
5165 * DESCRIPTION: query wavelet denoise process plate
5166 *
5167 * PARAMETERS : None
5168 *
5169 * RETURN     : WNR prcocess plate vlaue
5170 *==========================================================================*/
5171cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
5172{
5173    char prop[PROPERTY_VALUE_MAX];
5174    memset(prop, 0, sizeof(prop));
5175    property_get("persist.denoise.process.plates", prop, "0");
5176    int processPlate = atoi(prop);
5177    switch(processPlate) {
5178    case 0:
5179        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
5180    case 1:
5181        return CAM_WAVELET_DENOISE_CBCR_ONLY;
5182    case 2:
5183        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5184    case 3:
5185        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
5186    default:
5187        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5188    }
5189}
5190
5191/*===========================================================================
5192 * FUNCTION   : needRotationReprocess
5193 *
5194 * DESCRIPTION: if rotation needs to be done by reprocess in pp
5195 *
5196 * PARAMETERS : none
5197 *
5198 * RETURN     : true: needed
5199 *              false: no need
5200 *==========================================================================*/
5201bool QCamera3HardwareInterface::needRotationReprocess()
5202{
5203
5204    if (!mJpegSettings->is_jpeg_format) {
5205        // RAW image, no need to reprocess
5206        return false;
5207    }
5208
5209    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
5210        mJpegSettings->jpeg_orientation > 0) {
5211        // current rotation is not zero, and pp has the capability to process rotation
5212        ALOGD("%s: need do reprocess for rotation", __func__);
5213        return true;
5214    }
5215
5216    return false;
5217}
5218
5219/*===========================================================================
5220 * FUNCTION   : needReprocess
5221 *
5222 * DESCRIPTION: if reprocess in needed
5223 *
5224 * PARAMETERS : none
5225 *
5226 * RETURN     : true: needed
5227 *              false: no need
5228 *==========================================================================*/
5229bool QCamera3HardwareInterface::needReprocess()
5230{
5231    if (!mJpegSettings->is_jpeg_format) {
5232        // RAW image, no need to reprocess
5233        return false;
5234    }
5235
5236    if ((mJpegSettings->min_required_pp_mask > 0) ||
5237         isWNREnabled()) {
5238        // TODO: add for ZSL HDR later
5239        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
5240        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
5241        return true;
5242    }
5243    return needRotationReprocess();
5244}
5245
5246/*===========================================================================
5247 * FUNCTION   : addOnlineReprocChannel
5248 *
5249 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
5250 *              coming from input channel
5251 *
5252 * PARAMETERS :
5253 *   @pInputChannel : ptr to input channel whose frames will be post-processed
5254 *
5255 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
5256 *==========================================================================*/
5257QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
5258              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
5259{
5260    int32_t rc = NO_ERROR;
5261    QCamera3ReprocessChannel *pChannel = NULL;
5262    if (pInputChannel == NULL) {
5263        ALOGE("%s: input channel obj is NULL", __func__);
5264        return NULL;
5265    }
5266
5267    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
5268            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
5269    if (NULL == pChannel) {
5270        ALOGE("%s: no mem for reprocess channel", __func__);
5271        return NULL;
5272    }
5273
5274    // Capture channel, only need snapshot and postview streams start together
5275    mm_camera_channel_attr_t attr;
5276    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
5277    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
5278    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
5279    rc = pChannel->initialize();
5280    if (rc != NO_ERROR) {
5281        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
5282        delete pChannel;
5283        return NULL;
5284    }
5285
5286    // pp feature config
5287    cam_pp_feature_config_t pp_config;
5288    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
5289    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
5290        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
5291        pp_config.sharpness = mJpegSettings->sharpness;
5292    }
5293
5294    if (isWNREnabled()) {
5295        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
5296        pp_config.denoise2d.denoise_enable = 1;
5297        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
5298    }
5299    if (needRotationReprocess()) {
5300        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
5301        int rotation = mJpegSettings->jpeg_orientation;
5302        if (rotation == 0) {
5303            pp_config.rotation = ROTATE_0;
5304        } else if (rotation == 90) {
5305            pp_config.rotation = ROTATE_90;
5306        } else if (rotation == 180) {
5307            pp_config.rotation = ROTATE_180;
5308        } else if (rotation == 270) {
5309            pp_config.rotation = ROTATE_270;
5310        }
5311    }
5312
5313   rc = pChannel->addReprocStreamsFromSource(pp_config,
5314                                             pInputChannel,
5315                                             mMetadataChannel);
5316
5317    if (rc != NO_ERROR) {
5318        delete pChannel;
5319        return NULL;
5320    }
5321    return pChannel;
5322}
5323
/*===========================================================================
 * FUNCTION   : getMaxUnmatchedFramesInQueue
 *
 * DESCRIPTION: max number of unmatched frames a channel may queue up
 *
 * PARAMETERS : none
 *
 * RETURN     : min_num_pp_bufs from the static camera capability
 *==========================================================================*/
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
5328
/*===========================================================================
 * FUNCTION   : isWNREnabled
 *
 * DESCRIPTION: query whether wavelet noise reduction is supported
 *
 * PARAMETERS : none
 *
 * RETURN     : isWnrSupported flag from the static camera capability
 *==========================================================================*/
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
5332
5333}; //end namespace qcamera
5334