// QCamera3HWI.cpp revision 3cf45155bb833d30f02ca71c3255c0e4580868ff
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
/* Larger of two scalar values; arguments are evaluated twice. */
#define MAX(a, b) ((a) > (b) ? (a) : (b))

/* Shorthand for fetching the INDEX-th buffer pointer from a memory object. */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Per-sensor capability table. NOTE(review): the constructor dereferences
// gCamCapability[cameraId] without a NULL check, so entries are presumably
// populated before any HAL instance is created -- confirm against the
// module-load path.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Previously applied parameter batch (not referenced in this chunk).
parm_buffer_t *prevSettings;
// Cached static metadata handed to the framework, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes session open/close across HAL instances; openCamera() rejects
// a second simultaneous session while this count is non-zero.
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
// Non-zero while a camera session is active (guarded by mCameraSessionLock).
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
/* Translation tables between android.control.* metadata enum values and the
 * backend CAM_* enums. Each table is scanned linearly by the lookup helpers. */

// android.control.effectMode -> backend effect mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// android.control.awbMode -> backend white-balance mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// android.control.sceneMode -> backend scene mode. FACE_PRIORITY maps to OFF:
// face priority is handled by face detect, not a backend scene mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// android.control.afMode -> backend focus mode. AF_MODE_OFF maps to FIXED
// (no backend "off" equivalent).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// android.control.aeAntibandingMode -> backend antibanding mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// android.control.aeMode -> backend flash behavior. Both OFF and ON map to
// flash OFF (plain AE does not fire the flash); REDEYE maps to AUTO.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// android.flash.mode -> backend flash mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// android.statistics.faceDetectMode -> backend face-detect mode
// (SIMPLE mode is intentionally absent -- only OFF/FULL are supported here).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the trailing
// (0, 0) pair means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
141
// camera3_device_ops_t vtable handed to the framework via
// mCameraDevice.ops in the constructor. Uses GNU-style designated
// initializers (label: value), matching the rest of this HAL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};

// Maximum number of capture requests allowed in flight at once.
int QCamera3HardwareInterface::kMaxInFlight = 5;
155
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mHdrHint(false)
{
    // Wire up the framework-facing camera3_device_t.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced without a NULL
    // check -- assumes the capability table was populated before any HAL
    // instance is constructed; confirm against the module-load path.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power module is optional: log and continue if it is missing.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
210
211/*===========================================================================
212 * FUNCTION   : ~QCamera3HardwareInterface
213 *
214 * DESCRIPTION: destructor of QCamera3HardwareInterface
215 *
216 * PARAMETERS : none
217 *
218 * RETURN     : none
219 *==========================================================================*/
220QCamera3HardwareInterface::~QCamera3HardwareInterface()
221{
222    ALOGV("%s: E", __func__);
223    /* We need to stop all streams before deleting any stream */
224        /*flush the metadata list*/
225    if (!mStoredMetadataList.empty()) {
226        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
227              m != mStoredMetadataList.end(); m++) {
228            mMetadataChannel->bufDone(m->meta_buf);
229            free(m->meta_buf);
230            m = mStoredMetadataList.erase(m);
231        }
232    }
233
234    // NOTE: 'camera3_stream_t *' objects are already freed at
235    //        this stage by the framework
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (*it)->channel;
239        if (channel) {
240            channel->stop();
241        }
242    }
243
244    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
245        it != mStreamInfo.end(); it++) {
246        QCamera3Channel *channel = (*it)->channel;
247        if ((*it)->registered && (*it)->buffer_set.buffers) {
248             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
249        }
250        if (channel)
251            delete channel;
252        free (*it);
253    }
254
255    mPictureChannel = NULL;
256
257    if (mJpegSettings != NULL) {
258        free(mJpegSettings);
259        mJpegSettings = NULL;
260    }
261
262    /* Clean up all channels */
263    if (mCameraInitialized) {
264        if (mMetadataChannel) {
265            mMetadataChannel->stop();
266            delete mMetadataChannel;
267            mMetadataChannel = NULL;
268        }
269        deinitParameters();
270    }
271
272    if (mCameraOpened)
273        closeCamera();
274
275    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
276        if (mDefaultMetadata[i])
277            free_camera_metadata(mDefaultMetadata[i]);
278
279    pthread_cond_destroy(&mRequestCond);
280
281    pthread_mutex_destroy(&mMutex);
282    ALOGV("%s: X", __func__);
283}
284
285/*===========================================================================
286 * FUNCTION   : openCamera
287 *
288 * DESCRIPTION: open camera
289 *
290 * PARAMETERS :
291 *   @hw_device  : double ptr for camera device struct
292 *
293 * RETURN     : int32_t type of status
294 *              NO_ERROR  -- success
295 *              none-zero failure code
296 *==========================================================================*/
297int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
298{
299    int rc = 0;
300    pthread_mutex_lock(&mCameraSessionLock);
301    if (mCameraSessionActive) {
302        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
303        pthread_mutex_unlock(&mCameraSessionLock);
304        return -EUSERS;
305    }
306
307    if (mCameraOpened) {
308        *hw_device = NULL;
309        return PERMISSION_DENIED;
310    }
311
312    rc = openCamera();
313    if (rc == 0) {
314        *hw_device = &mCameraDevice.common;
315        mCameraSessionActive = 1;
316    } else
317        *hw_device = NULL;
318
319#ifdef HAS_MULTIMEDIA_HINTS
320    if (rc == 0) {
321        if (m_pPowerModule) {
322            if (m_pPowerModule->powerHint) {
323                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
324                        (void *)"state=1");
325            }
326        }
327    }
328#endif
329    pthread_mutex_unlock(&mCameraSessionLock);
330    return rc;
331}
332
333/*===========================================================================
334 * FUNCTION   : openCamera
335 *
336 * DESCRIPTION: open camera
337 *
338 * PARAMETERS : none
339 *
340 * RETURN     : int32_t type of status
341 *              NO_ERROR  -- success
342 *              none-zero failure code
343 *==========================================================================*/
344int QCamera3HardwareInterface::openCamera()
345{
346    if (mCameraHandle) {
347        ALOGE("Failure: Camera already opened");
348        return ALREADY_EXISTS;
349    }
350    mCameraHandle = camera_open(mCameraId);
351    if (!mCameraHandle) {
352        ALOGE("camera_open failed.");
353        return UNKNOWN_ERROR;
354    }
355
356    mCameraOpened = true;
357
358    return NO_ERROR;
359}
360
361/*===========================================================================
362 * FUNCTION   : closeCamera
363 *
364 * DESCRIPTION: close camera
365 *
366 * PARAMETERS : none
367 *
368 * RETURN     : int32_t type of status
369 *              NO_ERROR  -- success
370 *              none-zero failure code
371 *==========================================================================*/
372int QCamera3HardwareInterface::closeCamera()
373{
374    int rc = NO_ERROR;
375
376    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
377    mCameraHandle = NULL;
378    mCameraOpened = false;
379
380#ifdef HAS_MULTIMEDIA_HINTS
381    if (rc == NO_ERROR) {
382        if (m_pPowerModule) {
383            if (m_pPowerModule->powerHint) {
384                if(mHdrHint == true) {
385                    m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
386                            (void *)"state=3");
387                    mHdrHint = false;
388                }
389                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
390                        (void *)"state=0");
391            }
392        }
393    }
394#endif
395
396    return rc;
397}
398
399/*===========================================================================
400 * FUNCTION   : initialize
401 *
402 * DESCRIPTION: Initialize frameworks callback functions
403 *
404 * PARAMETERS :
405 *   @callback_ops : callback function to frameworks
406 *
407 * RETURN     :
408 *
409 *==========================================================================*/
410int QCamera3HardwareInterface::initialize(
411        const struct camera3_callback_ops *callback_ops)
412{
413    int rc;
414
415    pthread_mutex_lock(&mMutex);
416
417    rc = initParameters();
418    if (rc < 0) {
419        ALOGE("%s: initParamters failed %d", __func__, rc);
420       goto err1;
421    }
422    mCallbackOps = callback_ops;
423
424    pthread_mutex_unlock(&mMutex);
425    mCameraInitialized = true;
426    return 0;
427
428err1:
429    pthread_mutex_unlock(&mMutex);
430    return rc;
431}
432
433/*===========================================================================
434 * FUNCTION   : configureStreams
435 *
436 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
437 *              and output streams.
438 *
439 * PARAMETERS :
440 *   @stream_list : streams to be configured
441 *
442 * RETURN     :
443 *
444 *==========================================================================*/
445int QCamera3HardwareInterface::configureStreams(
446        camera3_stream_configuration_t *streamList)
447{
448    int rc = 0;
449    mIsZslMode = false;
450
451    // Sanity check stream_list
452    if (streamList == NULL) {
453        ALOGE("%s: NULL stream configuration", __func__);
454        return BAD_VALUE;
455    }
456    if (streamList->streams == NULL) {
457        ALOGE("%s: NULL stream list", __func__);
458        return BAD_VALUE;
459    }
460
461    if (streamList->num_streams < 1) {
462        ALOGE("%s: Bad number of streams requested: %d", __func__,
463                streamList->num_streams);
464        return BAD_VALUE;
465    }
466
467    /* first invalidate all the steams in the mStreamList
468     * if they appear again, they will be validated */
469    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
470            it != mStreamInfo.end(); it++) {
471        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
472        channel->stop();
473        (*it)->status = INVALID;
474    }
475    if (mMetadataChannel) {
476        /* If content of mStreamInfo is not 0, there is metadata stream */
477        mMetadataChannel->stop();
478    }
479
480#ifdef HAS_MULTIMEDIA_HINTS
481    if(mHdrHint == true) {
482        if (m_pPowerModule) {
483            if (m_pPowerModule->powerHint) {
484                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
485                        (void *)"state=3");
486                mHdrHint = false;
487            }
488        }
489    }
490#endif
491
492    pthread_mutex_lock(&mMutex);
493
494    camera3_stream_t *inputStream = NULL;
495    camera3_stream_t *jpegStream = NULL;
496    cam_stream_size_info_t stream_config_info;
497
498    for (size_t i = 0; i < streamList->num_streams; i++) {
499        camera3_stream_t *newStream = streamList->streams[i];
500        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
501                __func__, newStream->stream_type, newStream->format,
502                 newStream->width, newStream->height);
503        //if the stream is in the mStreamList validate it
504        bool stream_exists = false;
505        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
506                it != mStreamInfo.end(); it++) {
507            if ((*it)->stream == newStream) {
508                QCamera3Channel *channel =
509                    (QCamera3Channel*)(*it)->stream->priv;
510                stream_exists = true;
511                (*it)->status = RECONFIGURE;
512                /*delete the channel object associated with the stream because
513                  we need to reconfigure*/
514                delete channel;
515                (*it)->stream->priv = NULL;
516                (*it)->channel = NULL;
517            }
518        }
519        if (!stream_exists) {
520            //new stream
521            stream_info_t* stream_info;
522            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
523            stream_info->stream = newStream;
524            stream_info->status = VALID;
525            stream_info->registered = 0;
526            stream_info->channel = NULL;
527            mStreamInfo.push_back(stream_info);
528        }
529        if (newStream->stream_type == CAMERA3_STREAM_INPUT
530                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
531            if (inputStream != NULL) {
532                ALOGE("%s: Multiple input streams requested!", __func__);
533                pthread_mutex_unlock(&mMutex);
534                return BAD_VALUE;
535            }
536            inputStream = newStream;
537        }
538        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
539            jpegStream = newStream;
540        }
541    }
542    mInputStream = inputStream;
543
544    /*clean up invalid streams*/
545    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
546            it != mStreamInfo.end();) {
547        if(((*it)->status) == INVALID){
548            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
549            delete channel;
550            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
551            free(*it);
552            it = mStreamInfo.erase(it);
553        } else {
554            it++;
555        }
556    }
557    if (mMetadataChannel) {
558        delete mMetadataChannel;
559        mMetadataChannel = NULL;
560    }
561
562    //Create metadata channel and initialize it
563    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
564                    mCameraHandle->ops, captureResultCb,
565                    &gCamCapability[mCameraId]->padding_info, this);
566    if (mMetadataChannel == NULL) {
567        ALOGE("%s: failed to allocate metadata channel", __func__);
568        rc = -ENOMEM;
569        pthread_mutex_unlock(&mMutex);
570        return rc;
571    }
572    rc = mMetadataChannel->initialize();
573    if (rc < 0) {
574        ALOGE("%s: metadata channel initialization failed", __func__);
575        delete mMetadataChannel;
576        mMetadataChannel = NULL;
577        pthread_mutex_unlock(&mMutex);
578        return rc;
579    }
580
581    /* Allocate channel objects for the requested streams */
582    for (size_t i = 0; i < streamList->num_streams; i++) {
583        camera3_stream_t *newStream = streamList->streams[i];
584        uint32_t stream_usage = newStream->usage;
585        stream_config_info.stream_sizes[i].width = newStream->width;
586        stream_config_info.stream_sizes[i].height = newStream->height;
587        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
588            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
589            //for zsl stream the size is jpeg size
590            stream_config_info.stream_sizes[i].width = jpegStream->width;
591            stream_config_info.stream_sizes[i].height = jpegStream->height;
592            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
593        } else {
594           //for non zsl streams find out the format
595           switch (newStream->format) {
596           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
597              {
598                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
599                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
600                 } else {
601                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
602                 }
603              }
604              break;
605           case HAL_PIXEL_FORMAT_YCbCr_420_888:
606              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
607#ifdef HAS_MULTIMEDIA_HINTS
608              if (m_pPowerModule) {
609                  if (m_pPowerModule->powerHint) {
610                      m_pPowerModule->powerHint(m_pPowerModule,
611                          POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
612                      mHdrHint = true;
613                  }
614              }
615#endif
616              break;
617           case HAL_PIXEL_FORMAT_BLOB:
618              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
619              break;
620           default:
621              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
622              break;
623           }
624        }
625        if (newStream->priv == NULL) {
626            //New stream, construct channel
627            switch (newStream->stream_type) {
628            case CAMERA3_STREAM_INPUT:
629                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
630                break;
631            case CAMERA3_STREAM_BIDIRECTIONAL:
632                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
633                    GRALLOC_USAGE_HW_CAMERA_WRITE;
634                break;
635            case CAMERA3_STREAM_OUTPUT:
636                /* For video encoding stream, set read/write rarely
637                 * flag so that they may be set to un-cached */
638                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
639                    newStream->usage =
640                         (GRALLOC_USAGE_SW_READ_RARELY |
641                         GRALLOC_USAGE_SW_WRITE_RARELY |
642                         GRALLOC_USAGE_HW_CAMERA_WRITE);
643                else
644                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
645                break;
646            default:
647                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
648                break;
649            }
650
651            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
652                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
653                QCamera3Channel *channel;
654                switch (newStream->format) {
655                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
656                case HAL_PIXEL_FORMAT_YCbCr_420_888:
657                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
658                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
659                        jpegStream) {
660                        uint32_t width = jpegStream->width;
661                        uint32_t height = jpegStream->height;
662                        mIsZslMode = true;
663                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
664                            mCameraHandle->ops, captureResultCb,
665                            &gCamCapability[mCameraId]->padding_info, this, newStream,
666                            width, height);
667                    } else
668                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
669                            mCameraHandle->ops, captureResultCb,
670                            &gCamCapability[mCameraId]->padding_info, this, newStream);
671                    if (channel == NULL) {
672                        ALOGE("%s: allocation of channel failed", __func__);
673                        pthread_mutex_unlock(&mMutex);
674                        return -ENOMEM;
675                    }
676
677                    newStream->priv = channel;
678                    break;
679                case HAL_PIXEL_FORMAT_BLOB:
680                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
681                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
682                            mCameraHandle->ops, captureResultCb,
683                            &gCamCapability[mCameraId]->padding_info, this, newStream);
684                    if (mPictureChannel == NULL) {
685                        ALOGE("%s: allocation of channel failed", __func__);
686                        pthread_mutex_unlock(&mMutex);
687                        return -ENOMEM;
688                    }
689                    newStream->priv = (QCamera3Channel*)mPictureChannel;
690                    break;
691
692                //TODO: Add support for app consumed format?
693                default:
694                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
695                    break;
696                }
697            }
698
699            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
700                    it != mStreamInfo.end(); it++) {
701                if ((*it)->stream == newStream) {
702                    (*it)->channel = (QCamera3Channel*) newStream->priv;
703                    break;
704                }
705            }
706        } else {
707            // Channel already exists for this stream
708            // Do nothing for now
709        }
710    }
711
712    int32_t hal_version = CAM_HAL_V3;
713    stream_config_info.num_streams = streamList->num_streams;
714
715    // settings/parameters don't carry over for new configureStreams
716    memset(mParameters, 0, sizeof(parm_buffer_t));
717
718    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
719    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
720                sizeof(hal_version), &hal_version);
721
722    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
723                sizeof(stream_config_info), &stream_config_info);
724
725    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
726
727    /*For the streams to be reconfigured we need to register the buffers
728      since the framework wont*/
729    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
730            it != mStreamInfo.end(); it++) {
731        if ((*it)->status == RECONFIGURE) {
732            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
733            /*only register buffers for streams that have already been
734              registered*/
735            if ((*it)->registered) {
736                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
737                        (*it)->buffer_set.buffers);
738                if (rc != NO_ERROR) {
739                    ALOGE("%s: Failed to register the buffers of old stream,\
740                            rc = %d", __func__, rc);
741                }
742                ALOGV("%s: channel %p has %d buffers",
743                        __func__, channel, (*it)->buffer_set.num_buffers);
744            }
745        }
746
747        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
748        if (index == NAME_NOT_FOUND) {
749            mPendingBuffersMap.add((*it)->stream, 0);
750        } else {
751            mPendingBuffersMap.editValueAt(index) = 0;
752        }
753    }
754
755    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
756    mPendingRequestsList.clear();
757
758    mPendingFrameDropList.clear();
759
760    /*flush the metadata list*/
761    if (!mStoredMetadataList.empty()) {
762        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
763              m != mStoredMetadataList.end(); m++) {
764            mMetadataChannel->bufDone(m->meta_buf);
765            free(m->meta_buf);
766            m = mStoredMetadataList.erase(m);
767        }
768    }
769
770    mFirstRequest = true;
771
772    //Get min frame duration for this streams configuration
773    deriveMinFrameDuration();
774
775    pthread_mutex_unlock(&mMutex);
776    return rc;
777}
778
779/*===========================================================================
780 * FUNCTION   : validateCaptureRequest
781 *
782 * DESCRIPTION: validate a capture request from camera service
783 *
784 * PARAMETERS :
785 *   @request : request from framework to process
786 *
787 * RETURN     :
788 *
789 *==========================================================================*/
790int QCamera3HardwareInterface::validateCaptureRequest(
791                    camera3_capture_request_t *request)
792{
793    ssize_t idx = 0;
794    const camera3_stream_buffer_t *b;
795    CameraMetadata meta;
796
797    /* Sanity check the request */
798    if (request == NULL) {
799        ALOGE("%s: NULL capture request", __func__);
800        return BAD_VALUE;
801    }
802
803    uint32_t frameNumber = request->frame_number;
804    if (request->input_buffer != NULL &&
805            request->input_buffer->stream != mInputStream) {
806        ALOGE("%s: Request %d: Input buffer not from input stream!",
807                __FUNCTION__, frameNumber);
808        return BAD_VALUE;
809    }
810    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
811        ALOGE("%s: Request %d: No output buffers provided!",
812                __FUNCTION__, frameNumber);
813        return BAD_VALUE;
814    }
815    if (request->input_buffer != NULL) {
816        b = request->input_buffer;
817        QCamera3Channel *channel =
818            static_cast<QCamera3Channel*>(b->stream->priv);
819        if (channel == NULL) {
820            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
821                    __func__, frameNumber, idx);
822            return BAD_VALUE;
823        }
824        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
825            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
826                    __func__, frameNumber, idx);
827            return BAD_VALUE;
828        }
829        if (b->release_fence != -1) {
830            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
831                    __func__, frameNumber, idx);
832            return BAD_VALUE;
833        }
834        if (b->buffer == NULL) {
835            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
836                    __func__, frameNumber, idx);
837            return BAD_VALUE;
838        }
839    }
840
841    // Validate all buffers
842    b = request->output_buffers;
843    do {
844        QCamera3Channel *channel =
845                static_cast<QCamera3Channel*>(b->stream->priv);
846        if (channel == NULL) {
847            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
848                    __func__, frameNumber, idx);
849            return BAD_VALUE;
850        }
851        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
852            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
853                    __func__, frameNumber, idx);
854            return BAD_VALUE;
855        }
856        if (b->release_fence != -1) {
857            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
858                    __func__, frameNumber, idx);
859            return BAD_VALUE;
860        }
861        if (b->buffer == NULL) {
862            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
863                    __func__, frameNumber, idx);
864            return BAD_VALUE;
865        }
866        idx++;
867        b = request->output_buffers + idx;
868    } while (idx < (ssize_t)request->num_output_buffers);
869
870    return NO_ERROR;
871}
872
873/*===========================================================================
874 * FUNCTION   : deriveMinFrameDuration
875 *
876 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
877 *              on currently configured streams.
878 *
879 * PARAMETERS : NONE
880 *
881 * RETURN     : NONE
882 *
883 *==========================================================================*/
884void QCamera3HardwareInterface::deriveMinFrameDuration()
885{
886    int32_t maxJpegDimension, maxProcessedDimension;
887
888    maxJpegDimension = 0;
889    maxProcessedDimension = 0;
890
891    // Figure out maximum jpeg, processed, and raw dimensions
892    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
893        it != mStreamInfo.end(); it++) {
894
895        // Input stream doesn't have valid stream_type
896        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
897            continue;
898
899        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
900        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
901            if (dimension > maxJpegDimension)
902                maxJpegDimension = dimension;
903        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
904            if (dimension > maxProcessedDimension)
905                maxProcessedDimension = dimension;
906        }
907    }
908
909    //Assume all jpeg dimensions are in processed dimensions.
910    if (maxJpegDimension > maxProcessedDimension)
911        maxProcessedDimension = maxJpegDimension;
912
913    //Find minimum durations for processed, jpeg, and raw
914    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
915    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
916        if (maxProcessedDimension ==
917            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
918            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
919            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
920            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
921            break;
922        }
923    }
924}
925
926/*===========================================================================
927 * FUNCTION   : getMinFrameDuration
928 *
929 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
930 *              and current request configuration.
931 *
932 * PARAMETERS : @request: requset sent by the frameworks
933 *
934 * RETURN     : min farme duration for a particular request
935 *
936 *==========================================================================*/
937int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
938{
939    bool hasJpegStream = false;
940    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
941        const camera3_stream_t *stream = request->output_buffers[i].stream;
942        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
943            hasJpegStream = true;
944    }
945
946    if (!hasJpegStream)
947        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
948    else
949        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
950}
951
952/*===========================================================================
953 * FUNCTION   : handleMetadataWithLock
954 *
955 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
956 *
957 * PARAMETERS : @metadata_buf: metadata buffer
958 *
959 * RETURN     :
960 *
961 *==========================================================================*/
962void QCamera3HardwareInterface::handleMetadataWithLock(
963    mm_camera_super_buf_t *metadata_buf)
964{
965    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
966    int32_t frame_number_valid = *(int32_t *)
967        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
968    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
969        CAM_INTF_META_PENDING_REQUESTS, metadata);
970    uint32_t frame_number = *(uint32_t *)
971        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
972    const struct timeval *tv = (const struct timeval *)
973        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
974    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
975        tv->tv_usec * NSEC_PER_USEC;
976    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
977        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
978
979    int32_t urgent_frame_number_valid = *(int32_t *)
980        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
981    uint32_t urgent_frame_number = *(uint32_t *)
982        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
983
984    if (urgent_frame_number_valid) {
985        ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
986          __func__, urgent_frame_number, capture_time);
987
988        //Recieved an urgent Frame Number, handle it
989        //using HAL3.1 quirk for partial results
990        for (List<PendingRequestInfo>::iterator i =
991            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
992            camera3_notify_msg_t notify_msg;
993            ALOGV("%s: Iterator Frame = %d urgent frame = %d",
994                __func__, i->frame_number, urgent_frame_number);
995
996            if (i->frame_number < urgent_frame_number &&
997                i->bNotified == 0) {
998                notify_msg.type = CAMERA3_MSG_SHUTTER;
999                notify_msg.message.shutter.frame_number = i->frame_number;
1000                notify_msg.message.shutter.timestamp = capture_time -
1001                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
1002                mCallbackOps->notify(mCallbackOps, &notify_msg);
1003                i->timestamp = notify_msg.message.shutter.timestamp;
1004                i->bNotified = 1;
1005                ALOGV("%s: Dummy notification !!!! notify frame_number = %d, capture_time = %lld",
1006                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
1007            }
1008
1009            if (i->frame_number == urgent_frame_number) {
1010
1011                camera3_capture_result_t result;
1012
1013                // Send shutter notify to frameworks
1014                notify_msg.type = CAMERA3_MSG_SHUTTER;
1015                notify_msg.message.shutter.frame_number = i->frame_number;
1016                notify_msg.message.shutter.timestamp = capture_time;
1017                mCallbackOps->notify(mCallbackOps, &notify_msg);
1018
1019                i->timestamp = capture_time;
1020                i->bNotified = 1;
1021
1022                // Extract 3A metadata
1023                result.result =
1024                    translateCbUrgentMetadataToResultMetadata(metadata);
1025                // Populate metadata result
1026                result.frame_number = urgent_frame_number;
1027                result.num_output_buffers = 0;
1028                result.output_buffers = NULL;
1029                mCallbackOps->process_capture_result(mCallbackOps, &result);
1030                ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
1031                     __func__, result.frame_number, capture_time);
1032                free_camera_metadata((camera_metadata_t *)result.result);
1033                break;
1034            }
1035        }
1036    }
1037
1038    if (!frame_number_valid) {
1039        ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
1040        mMetadataChannel->bufDone(metadata_buf);
1041        free(metadata_buf);
1042        goto done_metadata;
1043    }
1044    ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
1045            frame_number, capture_time);
1046
1047    // Go through the pending requests info and send shutter/results to frameworks
1048    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1049        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1050        camera3_capture_result_t result;
1051        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1052
1053        // Flush out all entries with less or equal frame numbers.
1054        mPendingRequest--;
1055
1056        // Check whether any stream buffer corresponding to this is dropped or not
1057        // If dropped, then send the ERROR_BUFFER for the corresponding stream
1058        if (cam_frame_drop.frame_dropped) {
1059            camera3_notify_msg_t notify_msg;
1060            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1061                    j != i->buffers.end(); j++) {
1062                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1063                uint32_t streamTypeMask = channel->getStreamTypeMask();
1064                if (streamTypeMask & cam_frame_drop.stream_type_mask) {
1065                    // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
1066                    ALOGV("%s: Start of reporting error frame#=%d, streamMask=%d",
1067                           __func__, i->frame_number, streamTypeMask);
1068                    notify_msg.type = CAMERA3_MSG_ERROR;
1069                    notify_msg.message.error.frame_number = i->frame_number;
1070                    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
1071                    notify_msg.message.error.error_stream = j->stream;
1072                    mCallbackOps->notify(mCallbackOps, &notify_msg);
1073                    ALOGV("%s: End of reporting error frame#=%d, streamMask=%d",
1074                           __func__, i->frame_number, streamTypeMask);
1075                    PendingFrameDropInfo PendingFrameDrop;
1076                    PendingFrameDrop.frame_number=i->frame_number;
1077                    PendingFrameDrop.stream_type_mask = cam_frame_drop.stream_type_mask;
1078                    // Add the Frame drop info to mPendingFrameDropList
1079                    mPendingFrameDropList.push_back(PendingFrameDrop);
1080                }
1081            }
1082        }
1083
1084        // Send empty metadata with already filled buffers for dropped metadata
1085        // and send valid metadata with already filled buffers for current metadata
1086        if (i->frame_number < frame_number) {
1087            CameraMetadata dummyMetadata;
1088            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1089                    &i->timestamp, 1);
1090            dummyMetadata.update(ANDROID_REQUEST_ID,
1091                    &(i->request_id), 1);
1092            result.result = dummyMetadata.release();
1093        } else {
1094            result.result = translateCbMetadataToResultMetadata(metadata,
1095                    i->timestamp, i->request_id, i->blob_request,
1096                    &(i->input_jpeg_settings));
1097            if (mIsZslMode) {
1098                int found_metadata = 0;
1099                //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1100                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1101                    j != i->buffers.end(); j++) {
1102                    if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1103                        //check if corresp. zsl already exists in the stored metadata list
1104                        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1105                                m != mStoredMetadataList.begin(); m++) {
1106                            if (m->frame_number == frame_number) {
1107                                m->meta_buf = metadata_buf;
1108                                found_metadata = 1;
1109                                break;
1110                            }
1111                        }
1112                        if (!found_metadata) {
1113                            MetadataBufferInfo store_meta_info;
1114                            store_meta_info.meta_buf = metadata_buf;
1115                            store_meta_info.frame_number = frame_number;
1116                            mStoredMetadataList.push_back(store_meta_info);
1117                            found_metadata = 1;
1118                        }
1119                    }
1120                }
1121                if (!found_metadata) {
1122                    if (!i->input_buffer_present && i->blob_request) {
1123                        //livesnapshot or fallback non-zsl snapshot case
1124                        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1125                                j != i->buffers.end(); j++){
1126                            if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1127                                j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1128                                mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1129                                break;
1130                            }
1131                        }
1132                    } else {
1133                        //return the metadata immediately
1134                        mMetadataChannel->bufDone(metadata_buf);
1135                        free(metadata_buf);
1136                    }
1137                }
1138            } else if (!mIsZslMode && i->blob_request) {
1139                //If it is a blob request then send the metadata to the picture channel
1140                mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1141            } else {
1142                // Return metadata buffer
1143                mMetadataChannel->bufDone(metadata_buf);
1144                free(metadata_buf);
1145            }
1146        }
1147        if (!result.result) {
1148            ALOGE("%s: metadata is NULL", __func__);
1149        }
1150        result.frame_number = i->frame_number;
1151        result.num_output_buffers = 0;
1152        result.output_buffers = NULL;
1153        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1154                    j != i->buffers.end(); j++) {
1155            if (j->buffer) {
1156                result.num_output_buffers++;
1157            }
1158        }
1159
1160        if (result.num_output_buffers > 0) {
1161            camera3_stream_buffer_t *result_buffers =
1162                new camera3_stream_buffer_t[result.num_output_buffers];
1163            if (!result_buffers) {
1164                ALOGE("%s: Fatal error: out of memory", __func__);
1165            }
1166            size_t result_buffers_idx = 0;
1167            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1168                    j != i->buffers.end(); j++) {
1169                if (j->buffer) {
1170                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1171                            m != mPendingFrameDropList.end(); m++) {
1172                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
1173                        uint32_t streamTypeMask = channel->getStreamTypeMask();
1174                        if((m->stream_type_mask & streamTypeMask) &&
1175                                (m->frame_number==frame_number)) {
1176                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1177                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
1178                                  __func__, frame_number, streamTypeMask);
1179                            m = mPendingFrameDropList.erase(m);
1180                            break;
1181                        }
1182                    }
1183                    result_buffers[result_buffers_idx++] = *(j->buffer);
1184                    free(j->buffer);
1185                    j->buffer = NULL;
1186                    mPendingBuffersMap.editValueFor(j->stream)--;
1187                }
1188            }
1189            result.output_buffers = result_buffers;
1190
1191            mCallbackOps->process_capture_result(mCallbackOps, &result);
1192            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1193                    __func__, result.frame_number, i->timestamp);
1194            free_camera_metadata((camera_metadata_t *)result.result);
1195            delete[] result_buffers;
1196        } else {
1197            mCallbackOps->process_capture_result(mCallbackOps, &result);
1198            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1199                        __func__, result.frame_number, i->timestamp);
1200            free_camera_metadata((camera_metadata_t *)result.result);
1201        }
1202        // erase the element from the list
1203        i = mPendingRequestsList.erase(i);
1204    }
1205
1206done_metadata:
1207    if (!pending_requests)
1208        unblockRequestIfNecessary();
1209
1210}
1211
1212/*===========================================================================
1213 * FUNCTION   : handleBufferWithLock
1214 *
1215 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1216 *
1217 * PARAMETERS : @buffer: image buffer for the callback
1218 *              @frame_number: frame number of the image buffer
1219 *
1220 * RETURN     :
1221 *
1222 *==========================================================================*/
1223void QCamera3HardwareInterface::handleBufferWithLock(
1224    camera3_stream_buffer_t *buffer, uint32_t frame_number)
1225{
1226    // If the frame number doesn't exist in the pending request list,
1227    // directly send the buffer to the frameworks, and update pending buffers map
1228    // Otherwise, book-keep the buffer.
1229    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1230    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1231        i++;
1232    }
1233    if (i == mPendingRequestsList.end()) {
1234        // Verify all pending requests frame_numbers are greater
1235        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1236                j != mPendingRequestsList.end(); j++) {
1237            if (j->frame_number < frame_number) {
1238                ALOGE("%s: Error: pending frame number %d is smaller than %d",
1239                        __func__, j->frame_number, frame_number);
1240            }
1241        }
1242        camera3_capture_result_t result;
1243        result.result = NULL;
1244        result.frame_number = frame_number;
1245        result.num_output_buffers = 1;
1246        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1247                m != mPendingFrameDropList.end(); m++) {
1248            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
1249            uint32_t streamTypeMask = channel->getStreamTypeMask();
1250            if((m->stream_type_mask & streamTypeMask) &&
1251                (m->frame_number==frame_number) ) {
1252                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1253                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
1254                        __func__, frame_number, streamTypeMask);
1255                m = mPendingFrameDropList.erase(m);
1256                break;
1257            }
1258        }
1259        result.output_buffers = buffer;
1260        ALOGV("%s: result frame_number = %d, buffer = %p",
1261                __func__, frame_number, buffer);
1262        mPendingBuffersMap.editValueFor(buffer->stream)--;
1263        if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1264            int found = 0;
1265            for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1266                k != mStoredMetadataList.end(); k++) {
1267                if (k->frame_number == frame_number) {
1268                    k->zsl_buf_hdl = buffer->buffer;
1269                    found = 1;
1270                    break;
1271                }
1272            }
1273            if (!found) {
1274                MetadataBufferInfo meta_info;
1275                meta_info.frame_number = frame_number;
1276                meta_info.zsl_buf_hdl = buffer->buffer;
1277                mStoredMetadataList.push_back(meta_info);
1278            }
1279        }
1280        mCallbackOps->process_capture_result(mCallbackOps, &result);
1281    } else {
1282        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1283                j != i->buffers.end(); j++) {
1284            if (j->stream == buffer->stream) {
1285                if (j->buffer != NULL) {
1286                    ALOGE("%s: Error: buffer is already set", __func__);
1287                } else {
1288                    j->buffer = (camera3_stream_buffer_t *)malloc(
1289                            sizeof(camera3_stream_buffer_t));
1290                    *(j->buffer) = *buffer;
1291                    ALOGV("%s: cache buffer %p at result frame_number %d",
1292                            __func__, buffer, frame_number);
1293                }
1294            }
1295        }
1296    }
1297}
1298
1299/*===========================================================================
1300 * FUNCTION   : unblockRequestIfNecessary
1301 *
1302 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1303 *              that mMutex is held when this function is called.
1304 *
1305 * PARAMETERS :
1306 *
1307 * RETURN     :
1308 *
1309 *==========================================================================*/
1310void QCamera3HardwareInterface::unblockRequestIfNecessary()
1311{
1312    bool max_buffers_dequeued = false;
1313    for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1314        const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1315        uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1316        if (queued_buffers == stream->max_buffers) {
1317            max_buffers_dequeued = true;
1318            break;
1319        }
1320    }
1321    if (!max_buffers_dequeued) {
1322        // Unblock process_capture_request
1323        pthread_cond_signal(&mRequestCond);
1324    }
1325}
1326
1327/*===========================================================================
1328 * FUNCTION   : registerStreamBuffers
1329 *
1330 * DESCRIPTION: Register buffers for a given stream with the HAL device.
1331 *
1332 * PARAMETERS :
1333 *   @stream_list : streams to be configured
1334 *
1335 * RETURN     :
1336 *
1337 *==========================================================================*/
1338int QCamera3HardwareInterface::registerStreamBuffers(
1339        const camera3_stream_buffer_set_t *buffer_set)
1340{
1341    int rc = 0;
1342
1343    pthread_mutex_lock(&mMutex);
1344
1345    if (buffer_set == NULL) {
1346        ALOGE("%s: Invalid buffer_set parameter.", __func__);
1347        pthread_mutex_unlock(&mMutex);
1348        return -EINVAL;
1349    }
1350    if (buffer_set->stream == NULL) {
1351        ALOGE("%s: Invalid stream parameter.", __func__);
1352        pthread_mutex_unlock(&mMutex);
1353        return -EINVAL;
1354    }
1355    if (buffer_set->num_buffers < 1) {
1356        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
1357        pthread_mutex_unlock(&mMutex);
1358        return -EINVAL;
1359    }
1360    if (buffer_set->buffers == NULL) {
1361        ALOGE("%s: Invalid buffers parameter.", __func__);
1362        pthread_mutex_unlock(&mMutex);
1363        return -EINVAL;
1364    }
1365
1366    camera3_stream_t *stream = buffer_set->stream;
1367    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
1368
1369    //set the buffer_set in the mStreamInfo array
1370    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1371            it != mStreamInfo.end(); it++) {
1372        if ((*it)->stream == stream) {
1373            uint32_t numBuffers = buffer_set->num_buffers;
1374            (*it)->buffer_set.stream = buffer_set->stream;
1375            (*it)->buffer_set.num_buffers = numBuffers;
1376            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
1377            if ((*it)->buffer_set.buffers == NULL) {
1378                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
1379                pthread_mutex_unlock(&mMutex);
1380                return -ENOMEM;
1381            }
1382            for (size_t j = 0; j < numBuffers; j++){
1383                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
1384            }
1385            (*it)->registered = 1;
1386        }
1387    }
1388    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
1389    if (rc < 0) {
1390        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
1391        pthread_mutex_unlock(&mMutex);
1392        return -ENODEV;
1393    }
1394
1395    pthread_mutex_unlock(&mMutex);
1396    return NO_ERROR;
1397}
1398
1399/*===========================================================================
1400 * FUNCTION   : processCaptureRequest
1401 *
1402 * DESCRIPTION: process a capture request from camera service
1403 *
1404 * PARAMETERS :
1405 *   @request : request from framework to process
1406 *
1407 * RETURN     :
1408 *
1409 *==========================================================================*/
1410int QCamera3HardwareInterface::processCaptureRequest(
1411                    camera3_capture_request_t *request)
1412{
1413    int rc = NO_ERROR;
1414    int32_t request_id;
1415    CameraMetadata meta;
1416    MetadataBufferInfo reproc_meta;
1417    int queueMetadata = 0;
1418
1419    pthread_mutex_lock(&mMutex);
1420
1421    rc = validateCaptureRequest(request);
1422    if (rc != NO_ERROR) {
1423        ALOGE("%s: incoming request is not valid", __func__);
1424        pthread_mutex_unlock(&mMutex);
1425        return rc;
1426    }
1427
1428    meta = request->settings;
1429
1430    // For first capture request, send capture intent, and
1431    // stream on all streams
1432    if (mFirstRequest) {
1433
1434        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1435            int32_t hal_version = CAM_HAL_V3;
1436            uint8_t captureIntent =
1437                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1438
1439            memset(mParameters, 0, sizeof(parm_buffer_t));
1440            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1441            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1442                sizeof(hal_version), &hal_version);
1443            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1444                sizeof(captureIntent), &captureIntent);
1445            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1446                mParameters);
1447        }
1448
1449        mMetadataChannel->start();
1450        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1451            it != mStreamInfo.end(); it++) {
1452            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1453            channel->start();
1454        }
1455    }
1456
1457    uint32_t frameNumber = request->frame_number;
1458    uint32_t streamTypeMask = 0;
1459
1460    if (meta.exists(ANDROID_REQUEST_ID)) {
1461        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1462        mCurrentRequestId = request_id;
1463        ALOGV("%s: Received request with id: %d",__func__, request_id);
1464    } else if (mFirstRequest || mCurrentRequestId == -1){
1465        ALOGE("%s: Unable to find request id field, \
1466                & no previous id available", __func__);
1467        return NAME_NOT_FOUND;
1468    } else {
1469        ALOGV("%s: Re-using old request id", __func__);
1470        request_id = mCurrentRequestId;
1471    }
1472
1473    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1474                                    __func__, __LINE__,
1475                                    request->num_output_buffers,
1476                                    request->input_buffer,
1477                                    frameNumber);
1478    // Acquire all request buffers first
1479    int blob_request = 0;
1480    for (size_t i = 0; i < request->num_output_buffers; i++) {
1481        const camera3_stream_buffer_t& output = request->output_buffers[i];
1482        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1483        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1484
1485        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1486        //Call function to store local copy of jpeg data for encode params.
1487            blob_request = 1;
1488            rc = getJpegSettings(request->settings);
1489            if (rc < 0) {
1490                ALOGE("%s: failed to get jpeg parameters", __func__);
1491                pthread_mutex_unlock(&mMutex);
1492                return rc;
1493            }
1494        }
1495
1496        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1497        if (rc != OK) {
1498            ALOGE("%s: fence wait failed %d", __func__, rc);
1499            pthread_mutex_unlock(&mMutex);
1500            return rc;
1501        }
1502        streamTypeMask |= channel->getStreamTypeMask();
1503    }
1504
1505    rc = setFrameParameters(request, streamTypeMask);
1506    if (rc < 0) {
1507        ALOGE("%s: fail to set frame parameters", __func__);
1508        pthread_mutex_unlock(&mMutex);
1509        return rc;
1510    }
1511
1512    /* Update pending request list and pending buffers map */
1513    PendingRequestInfo pendingRequest;
1514    pendingRequest.frame_number = frameNumber;
1515    pendingRequest.num_buffers = request->num_output_buffers;
1516    pendingRequest.request_id = request_id;
1517    pendingRequest.blob_request = blob_request;
1518    pendingRequest.bNotified = 0;
1519    if (blob_request)
1520        pendingRequest.input_jpeg_settings = *mJpegSettings;
1521    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1522
1523    for (size_t i = 0; i < request->num_output_buffers; i++) {
1524        RequestedBufferInfo requestedBuf;
1525        requestedBuf.stream = request->output_buffers[i].stream;
1526        requestedBuf.buffer = NULL;
1527        pendingRequest.buffers.push_back(requestedBuf);
1528
1529        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1530    }
1531    mPendingRequestsList.push_back(pendingRequest);
1532
1533    // Notify metadata channel we receive a request
1534    mMetadataChannel->request(NULL, frameNumber);
1535
1536    // Call request on other streams
1537    for (size_t i = 0; i < request->num_output_buffers; i++) {
1538        const camera3_stream_buffer_t& output = request->output_buffers[i];
1539        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1540        mm_camera_buf_def_t *pInputBuffer = NULL;
1541
1542        if (channel == NULL) {
1543            ALOGE("%s: invalid channel pointer for stream", __func__);
1544            continue;
1545        }
1546
1547        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1548            QCamera3RegularChannel* inputChannel = NULL;
1549            if(request->input_buffer != NULL){
1550                //Try to get the internal format
1551                inputChannel = (QCamera3RegularChannel*)
1552                    request->input_buffer->stream->priv;
1553                if(inputChannel == NULL ){
1554                    ALOGE("%s: failed to get input channel handle", __func__);
1555                } else {
1556                    pInputBuffer =
1557                        inputChannel->getInternalFormatBuffer(
1558                                request->input_buffer->buffer);
1559                    ALOGD("%s: Input buffer dump",__func__);
1560                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1561                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1562                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1563                    ALOGD("Handle:%p", request->input_buffer->buffer);
1564                    //TODO: need to get corresponding metadata and send it to pproc
1565                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1566                         m != mStoredMetadataList.end(); m++) {
1567                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1568                            reproc_meta.meta_buf = m->meta_buf;
1569                            queueMetadata = 1;
1570                            break;
1571                        }
1572                    }
1573                }
1574            }
1575            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1576                            pInputBuffer,(QCamera3Channel*)inputChannel);
1577            if (queueMetadata) {
1578                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1579            }
1580        } else {
1581            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1582                __LINE__, output.buffer, frameNumber);
1583            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1584                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1585                     m != mStoredMetadataList.end(); m++) {
1586                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1587                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1588                            mMetadataChannel->bufDone(m->meta_buf);
1589                            free(m->meta_buf);
1590                            m = mStoredMetadataList.erase(m);
1591                            break;
1592                        }
1593                   }
1594                }
1595            }
1596            rc = channel->request(output.buffer, frameNumber);
1597        }
1598        if (rc < 0)
1599            ALOGE("%s: request failed", __func__);
1600    }
1601
1602    mFirstRequest = false;
1603    // Added a timed condition wait
1604    struct timespec ts;
1605    uint8_t isValidTimeout = 1;
1606    rc = clock_gettime(CLOCK_REALTIME, &ts);
1607    if (rc < 0) {
1608        isValidTimeout = 0;
1609        ALOGE("%s: Error reading the real time clock!!", __func__);
1610    }
1611    else {
1612        // Make timeout as 5 sec for request to be honored
1613        ts.tv_sec += 5;
1614    }
1615    //Block on conditional variable
1616    mPendingRequest++;
1617    do {
1618        if (!isValidTimeout) {
1619            ALOGV("%s: Blocking on conditional wait", __func__);
1620            pthread_cond_wait(&mRequestCond, &mMutex);
1621        }
1622        else {
1623            ALOGV("%s: Blocking on timed conditional wait", __func__);
1624            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1625            if (rc == ETIMEDOUT) {
1626                rc = -ENODEV;
1627                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1628                break;
1629            }
1630        }
1631        ALOGV("%s: Unblocked", __func__);
1632    }while (mPendingRequest >= kMaxInFlight);
1633
1634    pthread_mutex_unlock(&mMutex);
1635
1636    return rc;
1637}
1638
1639/*===========================================================================
1640 * FUNCTION   : getMetadataVendorTagOps
1641 *
1642 * DESCRIPTION:
1643 *
1644 * PARAMETERS :
1645 *
1646 *
1647 * RETURN     :
1648 *==========================================================================*/
1649void QCamera3HardwareInterface::getMetadataVendorTagOps(
1650                    vendor_tag_query_ops_t* /*ops*/)
1651{
1652    /* Enable locks when we eventually add Vendor Tags */
1653    /*
1654    pthread_mutex_lock(&mMutex);
1655
1656    pthread_mutex_unlock(&mMutex);
1657    */
1658    return;
1659}
1660
1661/*===========================================================================
1662 * FUNCTION   : dump
1663 *
1664 * DESCRIPTION:
1665 *
1666 * PARAMETERS :
1667 *
1668 *
1669 * RETURN     :
1670 *==========================================================================*/
1671void QCamera3HardwareInterface::dump(int /*fd*/)
1672{
1673    /*Enable lock when we implement this function*/
1674    /*
1675    pthread_mutex_lock(&mMutex);
1676
1677    pthread_mutex_unlock(&mMutex);
1678    */
1679    return;
1680}
1681
1682/*===========================================================================
1683 * FUNCTION   : flush
1684 *
1685 * DESCRIPTION:
1686 *
1687 * PARAMETERS :
1688 *
1689 *
1690 * RETURN     :
1691 *==========================================================================*/
1692int QCamera3HardwareInterface::flush()
1693{
1694    /*Enable lock when we implement this function*/
1695    /*
1696    pthread_mutex_lock(&mMutex);
1697
1698    pthread_mutex_unlock(&mMutex);
1699    */
1700    return 0;
1701}
1702
1703/*===========================================================================
1704 * FUNCTION   : captureResultCb
1705 *
1706 * DESCRIPTION: Callback handler for all capture result
1707 *              (streams, as well as metadata)
1708 *
1709 * PARAMETERS :
1710 *   @metadata : metadata information
1711 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1712 *               NULL if metadata.
1713 *
1714 * RETURN     : NONE
1715 *==========================================================================*/
1716void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1717                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1718{
1719    pthread_mutex_lock(&mMutex);
1720
1721    if (metadata_buf)
1722        handleMetadataWithLock(metadata_buf);
1723    else
1724        handleBufferWithLock(buffer, frame_number);
1725
1726    pthread_mutex_unlock(&mMutex);
1727    return;
1728}
1729
1730/*===========================================================================
1731 * FUNCTION   : translateCbMetadataToResultMetadata
1732 *
1733 * DESCRIPTION:
1734 *
1735 * PARAMETERS :
1736 *   @metadata : metadata information from callback
1737 *
1738 * RETURN     : camera_metadata_t*
1739 *              metadata in a format specified by fwk
1740 *==========================================================================*/
1741camera_metadata_t*
1742QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1743                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1744                                 int32_t request_id, int32_t BlobRequest,
1745                                 jpeg_settings_t* inputjpegsettings)
1746{
1747    CameraMetadata camMetadata;
1748    camera_metadata_t* resultMetadata;
1749
1750    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1751    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1752
1753    // Update the JPEG related info
1754    if (BlobRequest) {
1755        camMetadata.update(ANDROID_JPEG_ORIENTATION, &(inputjpegsettings->jpeg_orientation), 1);
1756        camMetadata.update(ANDROID_JPEG_QUALITY, &(inputjpegsettings->jpeg_quality), 1);
1757
1758        int32_t thumbnailSizeTable[2];
1759        thumbnailSizeTable[0] = inputjpegsettings->thumbnail_size.width;
1760        thumbnailSizeTable[1] = inputjpegsettings->thumbnail_size.height;
1761        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSizeTable, 2);
1762        ALOGV("%s: Orien=%d, quality=%d wid=%d, height=%d", __func__, inputjpegsettings->jpeg_orientation,
1763               inputjpegsettings->jpeg_quality,thumbnailSizeTable[0], thumbnailSizeTable[1]);
1764
1765        if (inputjpegsettings->gps_coordinates[0]) {
1766            double gpsCoordinates[3];
1767            gpsCoordinates[0]=*(inputjpegsettings->gps_coordinates[0]);
1768            gpsCoordinates[1]=*(inputjpegsettings->gps_coordinates[1]);
1769            gpsCoordinates[2]=*(inputjpegsettings->gps_coordinates[2]);
1770            camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
1771            ALOGV("%s: gpsCoordinates[0]=%f, 1=%f 2=%f", __func__, gpsCoordinates[0],
1772                 gpsCoordinates[1],gpsCoordinates[2]);
1773        }
1774
1775        if (inputjpegsettings->gps_timestamp) {
1776            camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, inputjpegsettings->gps_timestamp, 1);
1777            ALOGV("%s: gps_timestamp=%lld", __func__, *(inputjpegsettings->gps_timestamp));
1778        }
1779
1780        String8 str(inputjpegsettings->gps_processing_method);
1781        if (strlen(mJpegSettings->gps_processing_method) > 0) {
1782            camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
1783        }
1784    }
1785    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1786    uint8_t next_entry;
1787    while (curr_entry != CAM_INTF_PARM_MAX) {
1788       switch (curr_entry) {
1789         case CAM_INTF_META_FACE_DETECTION:{
1790             cam_face_detection_data_t *faceDetectionInfo =
1791                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1792             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1793             int32_t faceIds[MAX_ROI];
1794             uint8_t faceScores[MAX_ROI];
1795             int32_t faceRectangles[MAX_ROI * 4];
1796             int32_t faceLandmarks[MAX_ROI * 6];
1797             int j = 0, k = 0;
1798             for (int i = 0; i < numFaces; i++) {
1799                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1800                 faceScores[i] = faceDetectionInfo->faces[i].score;
1801                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1802                         faceRectangles+j, -1);
1803                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1804                 j+= 4;
1805                 k+= 6;
1806             }
1807
1808             if (numFaces <= 0) {
1809                memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
1810                memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
1811                memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
1812                memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
1813             }
1814
1815             camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1816             camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1817             camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1818               faceRectangles, numFaces*4);
1819             camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1820               faceLandmarks, numFaces*6);
1821
1822            break;
1823            }
1824         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1825             uint8_t  *color_correct_mode =
1826                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1827             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1828             break;
1829          }
1830
1831         // 3A state is sent in urgent partial result (uses quirk)
1832         case CAM_INTF_META_AEC_PRECAPTURE_ID:
1833         case CAM_INTF_META_AEC_ROI:
1834         case CAM_INTF_META_AEC_STATE:
1835         case CAM_INTF_PARM_FOCUS_MODE:
1836         case CAM_INTF_META_AF_ROI:
1837         case CAM_INTF_META_AF_STATE:
1838         case CAM_INTF_META_AF_TRIGGER_ID:
1839         case CAM_INTF_PARM_WHITE_BALANCE:
1840         case CAM_INTF_META_AWB_REGIONS:
1841         case CAM_INTF_META_AWB_STATE:
1842         case CAM_INTF_META_MODE: {
1843           ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
1844           break;
1845         }
1846
1847          case CAM_INTF_META_EDGE_MODE: {
1848             cam_edge_application_t  *edgeApplication =
1849                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1850             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
1851             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
1852             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
1853             break;
1854          }
1855          case CAM_INTF_META_FLASH_POWER: {
1856             uint8_t  *flashPower =
1857                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1858             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1859             break;
1860          }
1861          case CAM_INTF_META_FLASH_FIRING_TIME: {
1862             int64_t  *flashFiringTime =
1863                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1864             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1865             break;
1866          }
1867          case CAM_INTF_META_FLASH_STATE: {
1868             uint8_t  *flashState =
1869                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1870             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1871             break;
1872          }
1873          case CAM_INTF_META_FLASH_MODE:{
1874             uint8_t *flashMode = (uint8_t*)
1875                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
1876             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
1877             break;
1878          }
1879          case CAM_INTF_META_HOTPIXEL_MODE: {
1880              uint8_t  *hotPixelMode =
1881                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1882              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1883              break;
1884          }
1885          case CAM_INTF_META_LENS_APERTURE:{
1886             float  *lensAperture =
1887                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1888             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1889             break;
1890          }
1891          case CAM_INTF_META_LENS_FILTERDENSITY: {
1892             float  *filterDensity =
1893                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1894             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1895             break;
1896          }
1897          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1898             float  *focalLength =
1899                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1900             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1901             break;
1902          }
1903          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1904             float  *focusDistance =
1905                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1906             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1907             break;
1908          }
1909          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1910             float  *focusRange =
1911                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1912             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
1913             break;
1914          }
1915          case CAM_INTF_META_LENS_STATE: {
1916             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
1917             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
1918             break;
1919          }
1920          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1921             uint8_t  *opticalStab =
1922                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1923             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1924             break;
1925          }
1926          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1927             uint8_t  *noiseRedMode =
1928                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1929             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1930             break;
1931          }
1932          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
1933             uint8_t  *noiseRedStrength =
1934                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
1935             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
1936             break;
1937          }
1938          case CAM_INTF_META_SCALER_CROP_REGION: {
1939             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1940             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1941             int32_t scalerCropRegion[4];
1942             scalerCropRegion[0] = hScalerCropRegion->left;
1943             scalerCropRegion[1] = hScalerCropRegion->top;
1944             scalerCropRegion[2] = hScalerCropRegion->width;
1945             scalerCropRegion[3] = hScalerCropRegion->height;
1946             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1947             break;
1948          }
1949          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1950             int64_t  *sensorExpTime =
1951                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1952             mMetadataResponse.exposure_time = *sensorExpTime;
1953             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1954             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1955             break;
1956          }
1957          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1958             int64_t  *sensorFameDuration =
1959                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1960             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1961             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1962             break;
1963          }
1964          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1965             int32_t  *sensorSensitivity =
1966                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1967             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1968             mMetadataResponse.iso_speed = *sensorSensitivity;
1969             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1970             break;
1971          }
1972          case CAM_INTF_META_SHADING_MODE: {
1973             uint8_t  *shadingMode =
1974                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1975             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1976             break;
1977          }
1978          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1979             uint8_t  *faceDetectMode =
1980                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1981             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1982                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1983                                                        *faceDetectMode);
1984             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1985             break;
1986          }
1987          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1988             uint8_t  *histogramMode =
1989                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1990             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1991             break;
1992          }
1993          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1994               uint8_t  *sharpnessMapMode =
1995                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1996               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1997                                  sharpnessMapMode, 1);
1998               break;
1999           }
2000          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
2001               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
2002               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
2003               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
2004                                  (int32_t*)sharpnessMap->sharpness,
2005                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
2006               break;
2007          }
2008          case CAM_INTF_META_LENS_SHADING_MAP: {
2009               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2010               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2011               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2012               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
2013               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2014                                  (float*)lensShadingMap->lens_shading,
2015                                  4*map_width*map_height);
2016               break;
2017          }
2018          case CAM_INTF_META_TONEMAP_CURVES:{
2019             //Populate CAM_INTF_META_TONEMAP_CURVES
2020             /* ch0 = G, ch 1 = B, ch 2 = R*/
2021             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2022             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
2023             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
2024                                (float*)tonemap->curves[0].tonemap_points,
2025                                tonemap->tonemap_points_cnt * 2);
2026
2027             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
2028                                (float*)tonemap->curves[1].tonemap_points,
2029                                tonemap->tonemap_points_cnt * 2);
2030
2031             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
2032                                (float*)tonemap->curves[2].tonemap_points,
2033                                tonemap->tonemap_points_cnt * 2);
2034             break;
2035          }
2036          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
2037             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
2038             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
2039             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
2040             break;
2041          }
2042          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
2043              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
2044              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
2045              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
2046                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
2047              break;
2048          }
2049          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
2050             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
2051             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
2052             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
2053                       predColorCorrectionGains->gains, 4);
2054             break;
2055          }
2056          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
2057             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
2058                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2059             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2060                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2061             break;
2062
2063          }
2064          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
2065             uint8_t *blackLevelLock = (uint8_t*)
2066               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2067             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2068             break;
2069          }
2070          case CAM_INTF_META_SCENE_FLICKER:{
2071             uint8_t *sceneFlicker = (uint8_t*)
2072             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2073             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2074             break;
2075          }
2076          case CAM_INTF_PARM_LED_MODE:
2077             break;
2078          case CAM_INTF_PARM_EFFECT: {
2079             uint8_t *effectMode = (uint8_t*)
2080                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2081             uint8_t fwk_effectMode = lookupFwkName(EFFECT_MODES_MAP,
2082                                                    sizeof(EFFECT_MODES_MAP),
2083                                                    *effectMode);
2084             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2085             break;
2086          }
2087          default:
2088             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2089                   __func__, curr_entry);
2090             break;
2091       }
2092       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2093       curr_entry = next_entry;
2094    }
2095    resultMetadata = camMetadata.release();
2096    return resultMetadata;
2097}
2098
2099/*===========================================================================
2100 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
2101 *
2102 * DESCRIPTION:
2103 *
2104 * PARAMETERS :
2105 *   @metadata : metadata information from callback
2106 *
2107 * RETURN     : camera_metadata_t*
2108 *              metadata in a format specified by fwk
2109 *==========================================================================*/
2110camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata) {

    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Mark this buffer as a partial result so the framework knows to merge
    // it with the final metadata for the same frame (partial-result quirk).
    uint8_t partial_result_tag = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
    camMetadata.update(ANDROID_QUIRKS_PARTIAL_RESULT, &partial_result_tag, 1);

    // Walk the HAL metadata entry list and translate only the urgent 3A
    // (AE/AF/AWB state) tags; all other tags fall to the default case and
    // are left for the non-urgent translation pass.
    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
    uint8_t next_entry;
    while (curr_entry != CAM_INTF_PARM_MAX) {
      switch (curr_entry) {
        case CAM_INTF_META_AEC_PRECAPTURE_ID: {
            int32_t  *ae_precapture_id =
              (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                                          ae_precapture_id, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID", __func__);
          break;
        }
        case CAM_INTF_META_AEC_ROI: {
            cam_area_t  *hAeRegions =
                (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
            // Framework region layout: [xmin, ymin, xmax, ymax, weight]
            int32_t aeRegions[5];
            convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
            camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_REGIONS", __func__);
            break;
        }
        case CAM_INTF_META_AEC_STATE:{
            uint8_t *ae_state =
                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
            break;
        }
        case CAM_INTF_PARM_FOCUS_MODE:{
            uint8_t  *focusMode =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
            // Map the HAL focus-mode enum to the framework AF-mode enum.
            uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
            break;
        }
        case CAM_INTF_META_AF_ROI:{
            /*af regions*/
            cam_area_t  *hAfRegions =
                (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
            int32_t afRegions[5];
            convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
            camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_REGIONS", __func__);
            break;
        }
        case CAM_INTF_META_AF_STATE: {
            uint8_t  *afState =
               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
            break;
        }
        case CAM_INTF_META_AF_TRIGGER_ID: {
            int32_t  *afTriggerId =
                 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID", __func__);
            break;
        }
        case CAM_INTF_PARM_WHITE_BALANCE: {
           uint8_t  *whiteBalance =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
             // Map the HAL white-balance enum to the framework AWB-mode enum.
             uint8_t fwkWhiteBalanceMode =
                    lookupFwkName(WHITE_BALANCE_MODES_MAP,
                    sizeof(WHITE_BALANCE_MODES_MAP)/
                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
                 &fwkWhiteBalanceMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
             break;
        }
        case CAM_INTF_META_AWB_REGIONS: {
           /*awb regions*/
           cam_area_t  *hAwbRegions =
               (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
           int32_t awbRegions[5];
           convertToRegions(hAwbRegions->rect, awbRegions,hAwbRegions->weight);
           camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_REGIONS", __func__);
           break;
        }
        case CAM_INTF_META_AWB_STATE: {
           uint8_t  *whiteBalanceState =
              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
           break;
        }
        case CAM_INTF_META_MODE: {
            uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
            camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_MODE", __func__);
            break;
        }
        default:
            // Not an urgent tag; handled by the full metadata translation.
            ALOGV("%s: Normal Metadata %d, do not process",
              __func__, curr_entry);
       }
       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
       curr_entry = next_entry;
    }
    // Transfer ownership of the built metadata buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
2226
2227/*===========================================================================
2228 * FUNCTION   : convertToRegions
2229 *
2230 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2231 *
2232 * PARAMETERS :
2233 *   @rect   : cam_rect_t struct to convert
2234 *   @region : int32_t destination array
2235 *   @weight : if we are converting from cam_area_t, weight is valid
2236 *             else weight = -1
2237 *
2238 *==========================================================================*/
2239void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2240    region[0] = rect.left;
2241    region[1] = rect.top;
2242    region[2] = rect.left + rect.width;
2243    region[3] = rect.top + rect.height;
2244    if (weight > -1) {
2245        region[4] = weight;
2246    }
2247}
2248
/*===========================================================================
 * FUNCTION   : convertFromRegions
 *
 * DESCRIPTION: helper method to convert a framework region metadata entry
 *              ([xmin, ymin, xmax, ymax, weight]) into a cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination struct
 *   @settings : request settings containing the region metadata entry
 *   @tag      : metadata tag identifying which region entry to convert
 *
 *==========================================================================*/
2261void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2262                                                   const camera_metadata_t *settings,
2263                                                   uint32_t tag){
2264    CameraMetadata frame_settings;
2265    frame_settings = settings;
2266    int32_t x_min = frame_settings.find(tag).data.i32[0];
2267    int32_t y_min = frame_settings.find(tag).data.i32[1];
2268    int32_t x_max = frame_settings.find(tag).data.i32[2];
2269    int32_t y_max = frame_settings.find(tag).data.i32[3];
2270    roi->weight = frame_settings.find(tag).data.i32[4];
2271    roi->rect.left = x_min;
2272    roi->rect.top = y_min;
2273    roi->rect.width = x_max - x_min;
2274    roi->rect.height = y_max - y_min;
2275}
2276
2277/*===========================================================================
2278 * FUNCTION   : resetIfNeededROI
2279 *
2280 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2281 *              crop region
2282 *
2283 * PARAMETERS :
2284 *   @roi       : cam_area_t struct to resize
2285 *   @scalerCropRegion : cam_crop_region_t region to compare against
2286 *
2287 *
2288 *==========================================================================*/
2289bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2290                                                 const cam_crop_region_t* scalerCropRegion)
2291{
2292    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2293    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2294    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2295    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2296    if ((roi_x_max < scalerCropRegion->left) ||
2297        (roi_y_max < scalerCropRegion->top)  ||
2298        (roi->rect.left > crop_x_max) ||
2299        (roi->rect.top > crop_y_max)){
2300        return false;
2301    }
2302    if (roi->rect.left < scalerCropRegion->left) {
2303        roi->rect.left = scalerCropRegion->left;
2304    }
2305    if (roi->rect.top < scalerCropRegion->top) {
2306        roi->rect.top = scalerCropRegion->top;
2307    }
2308    if (roi_x_max > crop_x_max) {
2309        roi_x_max = crop_x_max;
2310    }
2311    if (roi_y_max > crop_y_max) {
2312        roi_y_max = crop_y_max;
2313    }
2314    roi->rect.width = roi_x_max - roi->rect.left;
2315    roi->rect.height = roi_y_max - roi->rect.top;
2316    return true;
2317}
2318
2319/*===========================================================================
2320 * FUNCTION   : convertLandmarks
2321 *
2322 * DESCRIPTION: helper method to extract the landmarks from face detection info
2323 *
2324 * PARAMETERS :
2325 *   @face   : cam_rect_t struct to convert
2326 *   @landmarks : int32_t destination array
2327 *
2328 *
2329 *==========================================================================*/
2330void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2331{
2332    landmarks[0] = face.left_eye_center.x;
2333    landmarks[1] = face.left_eye_center.y;
2334    landmarks[2] = face.right_eye_center.x;
2335    landmarks[3] = face.right_eye_center.y;
2336    landmarks[4] = face.mouth_center.x;
2337    landmarks[5] = face.mouth_center.y;
2338}
2339
2340#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2341/*===========================================================================
2342 * FUNCTION   : initCapabilities
2343 *
2344 * DESCRIPTION: initialize camera capabilities in static data struct
2345 *
2346 * PARAMETERS :
2347 *   @cameraId  : camera Id
2348 *
2349 * RETURN     : int32_t type of status
2350 *              NO_ERROR  -- success
2351 *              none-zero failure code
2352 *==========================================================================*/
2353int QCamera3HardwareInterface::initCapabilities(int cameraId)
2354{
2355    int rc = 0;
2356    mm_camera_vtbl_t *cameraHandle = NULL;
2357    QCamera3HeapMemory *capabilityHeap = NULL;
2358
2359    cameraHandle = camera_open(cameraId);
2360    if (!cameraHandle) {
2361        ALOGE("%s: camera_open failed", __func__);
2362        rc = -1;
2363        goto open_failed;
2364    }
2365
2366    capabilityHeap = new QCamera3HeapMemory();
2367    if (capabilityHeap == NULL) {
2368        ALOGE("%s: creation of capabilityHeap failed", __func__);
2369        goto heap_creation_failed;
2370    }
2371    /* Allocate memory for capability buffer */
2372    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2373    if(rc != OK) {
2374        ALOGE("%s: No memory for cappability", __func__);
2375        goto allocate_failed;
2376    }
2377
2378    /* Map memory for capability buffer */
2379    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2380    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2381                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2382                                capabilityHeap->getFd(0),
2383                                sizeof(cam_capability_t));
2384    if(rc < 0) {
2385        ALOGE("%s: failed to map capability buffer", __func__);
2386        goto map_failed;
2387    }
2388
2389    /* Query Capability */
2390    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2391    if(rc < 0) {
2392        ALOGE("%s: failed to query capability",__func__);
2393        goto query_failed;
2394    }
2395    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2396    if (!gCamCapability[cameraId]) {
2397        ALOGE("%s: out of memory", __func__);
2398        goto query_failed;
2399    }
2400    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2401                                        sizeof(cam_capability_t));
2402    rc = 0;
2403
2404query_failed:
2405    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2406                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2407map_failed:
2408    capabilityHeap->deallocate();
2409allocate_failed:
2410    delete capabilityHeap;
2411heap_creation_failed:
2412    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2413    cameraHandle = NULL;
2414open_failed:
2415    return rc;
2416}
2417
2418/*===========================================================================
2419 * FUNCTION   : initParameters
2420 *
2421 * DESCRIPTION: initialize camera parameters
2422 *
2423 * PARAMETERS :
2424 *
2425 * RETURN     : int32_t type of status
2426 *              NO_ERROR  -- success
2427 *              none-zero failure code
2428 *==========================================================================*/
2429int QCamera3HardwareInterface::initParameters()
2430{
2431    int rc = 0;
2432
2433    //Allocate Set Param Buffer
2434    mParamHeap = new QCamera3HeapMemory();
2435    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2436    if(rc != OK) {
2437        rc = NO_MEMORY;
2438        ALOGE("Failed to allocate SETPARM Heap memory");
2439        delete mParamHeap;
2440        mParamHeap = NULL;
2441        return rc;
2442    }
2443
2444    //Map memory for parameters buffer
2445    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2446            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2447            mParamHeap->getFd(0),
2448            sizeof(parm_buffer_t));
2449    if(rc < 0) {
2450        ALOGE("%s:failed to map SETPARM buffer",__func__);
2451        rc = FAILED_TRANSACTION;
2452        mParamHeap->deallocate();
2453        delete mParamHeap;
2454        mParamHeap = NULL;
2455        return rc;
2456    }
2457
2458    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2459    return rc;
2460}
2461
2462/*===========================================================================
2463 * FUNCTION   : deinitParameters
2464 *
2465 * DESCRIPTION: de-initialize camera parameters
2466 *
2467 * PARAMETERS :
2468 *
2469 * RETURN     : NONE
2470 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Reverse of initParameters(): unmap the shared SETPARM buffer from the
    // backend first, then free the heap that backed it.  Order matters —
    // the buffer must be unmapped before its memory is released.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap just freed; clear it so no stale
    // pointer survives.
    mParameters = NULL;
}
2482
2483/*===========================================================================
2484 * FUNCTION   : calcMaxJpegSize
2485 *
2486 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2487 *
2488 * PARAMETERS :
2489 *
2490 * RETURN     : max_jpeg_size
2491 *==========================================================================*/
2492int QCamera3HardwareInterface::calcMaxJpegSize()
2493{
2494    int32_t max_jpeg_size = 0;
2495    int temp_width, temp_height;
2496    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2497        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2498        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2499        if (temp_width * temp_height > max_jpeg_size ) {
2500            max_jpeg_size = temp_width * temp_height;
2501        }
2502    }
2503    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2504    return max_jpeg_size;
2505}
2506
2507/*===========================================================================
2508 * FUNCTION   : initStaticMetadata
2509 *
2510 * DESCRIPTION: initialize the static metadata
2511 *
2512 * PARAMETERS :
2513 *   @cameraId  : camera Id
2514 *
2515 * RETURN     : int32_t type of status
2516 *              0  -- success
2517 *              non-zero failure code
2518 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    // Builds the per-camera static metadata blob from the capability table
    // queried in initCapabilities(), and publishes it via
    // gStaticMetadata[cameraId].
    int rc = 0;
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1); */

    /*hard coded for now but this should come from sensor*/
    float min_focus_distance;
    if(facingBack){
        min_focus_distance = 10;
    } else {
        min_focus_distance = 0;
    }
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &min_focus_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      &gCamCapability[cameraId]->focal_length,
                      1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                      gCamCapability[cameraId]->optical_stab_modes_count);

    staticInfo.update(ANDROID_LENS_POSITION,
                      gCamCapability[cameraId]->lens_position,
                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));

    // Map/array sizes are published as {width, height} int32 pairs.
    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
            geo_correction_map_size,
            sizeof(geo_correction_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
                       gCamCapability[cameraId]->geo_correction_map,
                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));

    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);

    camera_metadata_rational baseGainFactor = {
            gCamCapability[cameraId]->base_gain_factor.numerator,
            gCamCapability[cameraId]->base_gain_factor.denominator};
    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
                      &baseGainFactor, 1);

    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                               gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, 2);

    // Active array is published as {xmin, ymin, width, height} with the
    // origin hard-coded at (0, 0).
    int32_t active_array_size[] = {0, 0,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                                gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);


    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
                      &gCamCapability[cameraId]->raw_min_duration,
                       1);

    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
                                                HAL_PIXEL_FORMAT_BLOB};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    // Flatten the picture-size table into interleaved w/h pairs; reused
    // below for both PROCESSED and JPEG size lists.
    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /** Quirk for urgent 3A state until final interface is worked out */
    uint8_t usePartialResultQuirk = 1;
    staticInfo.update(ANDROID_QUIRKS_USE_PARTIAL_RESULT,
                      &usePartialResultQuirk, 1);

    /*HAL 1 and HAL 3 common*/
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    int32_t max3aRegions = 1;
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            &max3aRegions, 1);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                                        gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    // Same computation as calcMaxJpegSize(): largest picture area * 1.5
    // plus the jpeg blob header.  Inlined here (this path has only the
    // cameraId, not an instance) — keep the two in sync.
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // The mode lists below translate each HAL capability enum to its
    // framework counterpart, silently dropping values with no mapping.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    int32_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP,
                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    // supported_indexes remembers each accepted scene mode's original
    // capability-table index for makeOverridesList() below.
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // Three override entries (AE, AWB, AF) per scene mode.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                 gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                    gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);


    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // NOTE(review): avail_ae_modes holds 5 entries, but it is filled with
    // supported_ae_modes_cnt entries plus 3 flash modes when flash is
    // available — confirm supported_ae_modes_cnt is capped at 2 upstream,
    // otherwise this overflows.
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // {raw, processed, jpeg} concurrent output stream limits.
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    // Ownership of the metadata buffer moves to the global table.
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}
2883
2884/*===========================================================================
2885 * FUNCTION   : makeTable
2886 *
2887 * DESCRIPTION: make a table of sizes
2888 *
2889 * PARAMETERS :
2890 *
2891 *
2892 *==========================================================================*/
2893void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2894                                          int32_t* sizeTable)
2895{
2896    int j = 0;
2897    for (int i = 0; i < size; i++) {
2898        sizeTable[j] = dimTable[i].width;
2899        sizeTable[j+1] = dimTable[i].height;
2900        j+=2;
2901    }
2902}
2903
2904/*===========================================================================
2905 * FUNCTION   : makeFPSTable
2906 *
2907 * DESCRIPTION: make a table of fps ranges
2908 *
2909 * PARAMETERS :
2910 *
2911 *==========================================================================*/
2912void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2913                                          int32_t* fpsRangesTable)
2914{
2915    int j = 0;
2916    for (int i = 0; i < size; i++) {
2917        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2918        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2919        j+=2;
2920    }
2921}
2922
2923/*===========================================================================
2924 * FUNCTION   : makeOverridesList
2925 *
2926 * DESCRIPTION: make a list of scene mode overrides
2927 *
2928 * PARAMETERS :
2929 *
2930 *
2931 *==========================================================================*/
2932void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2933                                                  uint8_t size, uint8_t* overridesList,
2934                                                  uint8_t* supported_indexes,
2935                                                  int camera_id)
2936{
2937    /*daemon will give a list of overrides for all scene modes.
2938      However we should send the fwk only the overrides for the scene modes
2939      supported by the framework*/
2940    int j = 0, index = 0, supt = 0;
2941    uint8_t focus_override;
2942    for (int i = 0; i < size; i++) {
2943        supt = 0;
2944        index = supported_indexes[i];
2945        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2946        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2947                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2948                                                    overridesTable[index].awb_mode);
2949        focus_override = (uint8_t)overridesTable[index].af_mode;
2950        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2951           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2952              supt = 1;
2953              break;
2954           }
2955        }
2956        if (supt) {
2957           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2958                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2959                                              focus_override);
2960        } else {
2961           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2962        }
2963        j+=3;
2964    }
2965}
2966
2967/*===========================================================================
 * FUNCTION   : getScalarFormat
2969 *
2970 * DESCRIPTION: convert the format to type recognized by framework
2971 *
2972 * PARAMETERS : format : the format from backend
2973 *
2974 ** RETURN    : format recognized by framework
2975 *
2976 *==========================================================================*/
2977int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2978{
2979    int32_t halPixelFormat;
2980
2981    switch (format) {
2982    case CAM_FORMAT_YUV_420_NV12:
2983        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2984        break;
2985    case CAM_FORMAT_YUV_420_NV21:
2986        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2987        break;
2988    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2989        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2990        break;
2991    case CAM_FORMAT_YUV_420_YV12:
2992        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2993        break;
2994    case CAM_FORMAT_YUV_422_NV16:
2995    case CAM_FORMAT_YUV_422_NV61:
2996    default:
2997        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2998        break;
2999    }
3000    return halPixelFormat;
3001}
3002
3003/*===========================================================================
3004 * FUNCTION   : getSensorSensitivity
3005 *
3006 * DESCRIPTION: convert iso_mode to an integer value
3007 *
3008 * PARAMETERS : iso_mode : the iso_mode supported by sensor
3009 *
3010 ** RETURN    : sensitivity supported by sensor
3011 *
3012 *==========================================================================*/
3013int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
3014{
3015    int32_t sensitivity;
3016
3017    switch (iso_mode) {
3018    case CAM_ISO_MODE_100:
3019        sensitivity = 100;
3020        break;
3021    case CAM_ISO_MODE_200:
3022        sensitivity = 200;
3023        break;
3024    case CAM_ISO_MODE_400:
3025        sensitivity = 400;
3026        break;
3027    case CAM_ISO_MODE_800:
3028        sensitivity = 800;
3029        break;
3030    case CAM_ISO_MODE_1600:
3031        sensitivity = 1600;
3032        break;
3033    default:
3034        sensitivity = -1;
3035        break;
3036    }
3037    return sensitivity;
3038}
3039
3040
3041/*===========================================================================
3042 * FUNCTION   : AddSetParmEntryToBatch
3043 *
3044 * DESCRIPTION: add set parameter entry into batch
3045 *
3046 * PARAMETERS :
3047 *   @p_table     : ptr to parameter buffer
3048 *   @paramType   : parameter type
3049 *   @paramLength : length of parameter value
3050 *   @paramValue  : ptr to parameter value
3051 *
3052 * RETURN     : int32_t type of status
3053 *              NO_ERROR  -- success
3054 *              none-zero failure code
3055 *==========================================================================*/
3056int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
3057                                                          cam_intf_parm_type_t paramType,
3058                                                          uint32_t paramLength,
3059                                                          void *paramValue)
3060{
3061    int position = paramType;
3062    int current, next;
3063
3064    /*************************************************************************
3065    *                 Code to take care of linking next flags                *
3066    *************************************************************************/
3067    current = GET_FIRST_PARAM_ID(p_table);
3068    if (position == current){
3069        //DO NOTHING
3070    } else if (position < current){
3071        SET_NEXT_PARAM_ID(position, p_table, current);
3072        SET_FIRST_PARAM_ID(p_table, position);
3073    } else {
3074        /* Search for the position in the linked list where we need to slot in*/
3075        while (position > GET_NEXT_PARAM_ID(current, p_table))
3076            current = GET_NEXT_PARAM_ID(current, p_table);
3077
3078        /*If node already exists no need to alter linking*/
3079        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
3080            next = GET_NEXT_PARAM_ID(current, p_table);
3081            SET_NEXT_PARAM_ID(current, p_table, position);
3082            SET_NEXT_PARAM_ID(position, p_table, next);
3083        }
3084    }
3085
3086    /*************************************************************************
3087    *                   Copy contents into entry                             *
3088    *************************************************************************/
3089
3090    if (paramLength > sizeof(parm_type_t)) {
3091        ALOGE("%s:Size of input larger than max entry size",__func__);
3092        return BAD_VALUE;
3093    }
3094    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
3095    return NO_ERROR;
3096}
3097
3098/*===========================================================================
3099 * FUNCTION   : lookupFwkName
3100 *
3101 * DESCRIPTION: In case the enum is not same in fwk and backend
3102 *              make sure the parameter is correctly propogated
3103 *
3104 * PARAMETERS  :
3105 *   @arr      : map between the two enums
3106 *   @len      : len of the map
3107 *   @hal_name : name of the hal_parm to map
3108 *
3109 * RETURN     : int type of status
3110 *              fwk_name  -- success
3111 *              none-zero failure code
3112 *==========================================================================*/
3113int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
3114                                             int len, int hal_name)
3115{
3116
3117    for (int i = 0; i < len; i++) {
3118        if (arr[i].hal_name == hal_name)
3119            return arr[i].fwk_name;
3120    }
3121
3122    /* Not able to find matching framework type is not necessarily
3123     * an error case. This happens when mm-camera supports more attributes
3124     * than the frameworks do */
3125    ALOGD("%s: Cannot find matching framework type", __func__);
3126    return NAME_NOT_FOUND;
3127}
3128
3129/*===========================================================================
3130 * FUNCTION   : lookupHalName
3131 *
3132 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
3134 *
3135 * PARAMETERS  :
3136 *   @arr      : map between the two enums
3137 *   @len      : len of the map
3138 *   @fwk_name : name of the hal_parm to map
3139 *
3140 * RETURN     : int32_t type of status
3141 *              hal_name  -- success
3142 *              none-zero failure code
3143 *==========================================================================*/
3144int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
3145                                             int len, int fwk_name)
3146{
3147    for (int i = 0; i < len; i++) {
3148       if (arr[i].fwk_name == fwk_name)
3149           return arr[i].hal_name;
3150    }
3151    ALOGE("%s: Cannot find matching hal type", __func__);
3152    return NAME_NOT_FOUND;
3153}
3154
3155/*===========================================================================
 * FUNCTION   : getCamInfo
3157 *
3158 * DESCRIPTION: query camera capabilities
3159 *
3160 * PARAMETERS :
3161 *   @cameraId  : camera Id
3162 *   @info      : camera info struct to be filled in with camera capabilities
3163 *
3164 * RETURN     : int32_t type of status
3165 *              NO_ERROR  -- success
3166 *              none-zero failure code
3167 *==========================================================================*/
3168int QCamera3HardwareInterface::getCamInfo(int cameraId,
3169                                    struct camera_info *info)
3170{
3171    int rc = 0;
3172
3173    if (NULL == gCamCapability[cameraId]) {
3174        rc = initCapabilities(cameraId);
3175        if (rc < 0) {
3176            //pthread_mutex_unlock(&g_camlock);
3177            return rc;
3178        }
3179    }
3180
3181    if (NULL == gStaticMetadata[cameraId]) {
3182        rc = initStaticMetadata(cameraId);
3183        if (rc < 0) {
3184            return rc;
3185        }
3186    }
3187
3188    switch(gCamCapability[cameraId]->position) {
3189    case CAM_POSITION_BACK:
3190        info->facing = CAMERA_FACING_BACK;
3191        break;
3192
3193    case CAM_POSITION_FRONT:
3194        info->facing = CAMERA_FACING_FRONT;
3195        break;
3196
3197    default:
3198        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
3199        rc = -1;
3200        break;
3201    }
3202
3203
3204    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
3205    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
3206    info->static_camera_characteristics = gStaticMetadata[cameraId];
3207
3208    return rc;
3209}
3210
3211/*===========================================================================
3212 * FUNCTION   : translateMetadata
3213 *
3214 * DESCRIPTION: translate the metadata into camera_metadata_t
3215 *
3216 * PARAMETERS : type of the request
3217 *
3218 *
3219 * RETURN     : success: camera_metadata_t*
3220 *              failure: NULL
3221 *
3222 *==========================================================================*/
3223camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
3224{
3225    pthread_mutex_lock(&mMutex);
3226
3227    if (mDefaultMetadata[type] != NULL) {
3228        pthread_mutex_unlock(&mMutex);
3229        return mDefaultMetadata[type];
3230    }
3231    //first time we are handling this request
3232    //fill up the metadata structure using the wrapper class
3233    CameraMetadata settings;
3234    //translate from cam_capability_t to camera_metadata_tag_t
3235    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
3236    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
3237    int32_t defaultRequestID = 0;
3238    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
3239
3240    /*control*/
3241
3242    uint8_t controlIntent = 0;
3243    switch (type) {
3244      case CAMERA3_TEMPLATE_PREVIEW:
3245        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
3246        break;
3247      case CAMERA3_TEMPLATE_STILL_CAPTURE:
3248        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
3249        break;
3250      case CAMERA3_TEMPLATE_VIDEO_RECORD:
3251        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
3252        break;
3253      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
3254        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
3255        break;
3256      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
3257        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
3258        break;
3259      default:
3260        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
3261        break;
3262    }
3263    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
3264
3265    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3266            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
3267
3268    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
3269    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3270
3271    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
3272    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
3273
3274    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
3275    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
3276
3277    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
3278    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
3279
3280    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
3281    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
3282
3283    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
3284    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
3285
3286    static uint8_t focusMode;
3287    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
3288        ALOGE("%s: Setting focus mode to auto", __func__);
3289        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
3290    } else {
3291        ALOGE("%s: Setting focus mode to off", __func__);
3292        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3293    }
3294    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
3295
3296    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
3297    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
3298
3299    /*flash*/
3300    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3301    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3302
3303    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3304    settings.update(ANDROID_FLASH_FIRING_POWER,
3305            &flashFiringLevel, 1);
3306
3307    /* lens */
3308    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3309    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3310
3311    if (gCamCapability[mCameraId]->filter_densities_count) {
3312        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3313        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3314                        gCamCapability[mCameraId]->filter_densities_count);
3315    }
3316
3317    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3318    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3319
3320    /* Exposure time(Update the Min Exposure Time)*/
3321    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
3322    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
3323
3324    /* frame duration */
3325    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
3326    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
3327
3328    /* sensitivity */
3329    static const int32_t default_sensitivity = 100;
3330    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
3331
3332    /*edge mode*/
3333    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
3334    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
3335
3336    /*noise reduction mode*/
3337    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3338    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3339
3340    /*color correction mode*/
3341    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3342    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3343
3344    /*transform matrix mode*/
3345    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3346    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3347
3348    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3349    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3350
3351    int32_t scaler_crop_region[4];
3352    scaler_crop_region[0] = 0;
3353    scaler_crop_region[1] = 0;
3354    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
3355    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
3356    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
3357
3358    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
3359    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
3360
3361    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3362    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
3363
3364    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
3365                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
3366                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
3367    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
3368
3369    mDefaultMetadata[type] = settings.release();
3370
3371    pthread_mutex_unlock(&mMutex);
3372    return mDefaultMetadata[type];
3373}
3374
3375/*===========================================================================
3376 * FUNCTION   : setFrameParameters
3377 *
3378 * DESCRIPTION: set parameters per frame as requested in the metadata from
3379 *              framework
3380 *
3381 * PARAMETERS :
3382 *   @request   : request that needs to be serviced
3383 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3384 *
3385 * RETURN     : success: NO_ERROR
3386 *              failure:
3387 *==========================================================================*/
int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
                    uint32_t streamTypeMask)
{
    /*translate from camera_metadata_t type to parm_type_t*/
    int rc = 0;
    /* Settings may legitimately be NULL on later (repeating) requests, but
     * the very first request must carry a full settings buffer. */
    if (request->settings == NULL && mFirstRequest) {
        /*settings cannot be null for the first request*/
        return BAD_VALUE;
    }

    int32_t hal_version = CAM_HAL_V3;

    /* The parameter batch is rebuilt from scratch for every frame;
     * first_flagged_entry = CAM_INTF_PARM_MAX marks the linked list of
     * flagged entries as empty (see AddSetParmEntryToBatch). */
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);
    if (rc < 0) {
        ALOGE("%s: Failed to set hal version in the parameters", __func__);
        return BAD_VALUE;
    }

    /*we need to update the frame number in the parameters*/
    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
                                sizeof(request->frame_number), &(request->frame_number));
    if (rc < 0) {
        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
        return BAD_VALUE;
    }

    /* Update stream id mask where buffers are requested */
    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
                                sizeof(streamTypeMask), &streamTypeMask);
    if (rc < 0) {
        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
        return BAD_VALUE;
    }

    /* Translate the per-frame framework settings only when present. */
    if(request->settings != NULL){
        rc = translateMetadataToParameters(request);
    }
    /*set the parameters to backend*/
    /* NOTE(review): the return value of set_parms is ignored, and the batch
     * is sent even if translateMetadataToParameters failed above (its rc is
     * still returned to the caller) — confirm this is intentional. */
    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    return rc;
}
3432
3433/*===========================================================================
3434 * FUNCTION   : translateMetadataToParameters
3435 *
3436 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3437 *
3438 *
3439 * PARAMETERS :
3440 *   @request  : request sent from framework
3441 *
3442 *
3443 * RETURN     : success: NO_ERROR
3444 *              failure:
3445 *==========================================================================*/
3446int QCamera3HardwareInterface::translateMetadataToParameters
3447                                  (const camera3_capture_request_t *request)
3448{
3449    int rc = 0;
3450    CameraMetadata frame_settings;
3451    frame_settings = request->settings;
3452
3453    /* Do not change the order of the following list unless you know what you are
3454     * doing.
3455     * The order is laid out in such a way that parameters in the front of the table
3456     * may be used to override the parameters later in the table. Examples are:
3457     * 1. META_MODE should precede AEC/AWB/AF MODE
 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3459     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3460     * 4. Any mode should precede it's corresponding settings
3461     */
3462    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3463        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3464        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3465                sizeof(metaMode), &metaMode);
3466        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3467           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3468           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3469                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3470                                             fwk_sceneMode);
3471           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3472                sizeof(sceneMode), &sceneMode);
3473        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3474           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
3475           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3476                sizeof(sceneMode), &sceneMode);
3477        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3478           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
3479           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3480                sizeof(sceneMode), &sceneMode);
3481        }
3482    }
3483
3484    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3485        uint8_t fwk_aeMode =
3486            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3487        uint8_t aeMode;
3488        int32_t redeye;
3489
3490        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3491            aeMode = CAM_AE_MODE_OFF;
3492        } else {
3493            aeMode = CAM_AE_MODE_ON;
3494        }
3495        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3496            redeye = 1;
3497        } else {
3498            redeye = 0;
3499        }
3500
3501        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3502                                          sizeof(AE_FLASH_MODE_MAP),
3503                                          fwk_aeMode);
3504        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3505                sizeof(aeMode), &aeMode);
3506        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3507                sizeof(flashMode), &flashMode);
3508        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3509                sizeof(redeye), &redeye);
3510    }
3511
3512    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3513        uint8_t fwk_whiteLevel =
3514            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3515        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3516                sizeof(WHITE_BALANCE_MODES_MAP),
3517                fwk_whiteLevel);
3518        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3519                sizeof(whiteLevel), &whiteLevel);
3520    }
3521
3522    float focalDistance = -1.0;
3523    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3524        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3525        rc = AddSetParmEntryToBatch(mParameters,
3526                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3527                sizeof(focalDistance), &focalDistance);
3528    }
3529
3530    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3531        uint8_t fwk_focusMode =
3532            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3533        uint8_t focusMode;
3534        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3535            focusMode = CAM_FOCUS_MODE_INFINITY;
3536        } else{
3537         focusMode = lookupHalName(FOCUS_MODES_MAP,
3538                                   sizeof(FOCUS_MODES_MAP),
3539                                   fwk_focusMode);
3540        }
3541        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3542                sizeof(focusMode), &focusMode);
3543    }
3544
3545    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3546        int32_t antibandingMode =
3547            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3548        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3549                sizeof(antibandingMode), &antibandingMode);
3550    }
3551
3552    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3553        int32_t expCompensation = frame_settings.find(
3554            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3555        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3556            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3557        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3558            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3559        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3560          sizeof(expCompensation), &expCompensation);
3561    }
3562
3563    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3564        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3565        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3566                sizeof(aeLock), &aeLock);
3567    }
3568    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3569        cam_fps_range_t fps_range;
3570        fps_range.min_fps =
3571            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3572        fps_range.max_fps =
3573            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3574        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3575                sizeof(fps_range), &fps_range);
3576    }
3577
3578    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3579        uint8_t awbLock =
3580            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3581        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3582                sizeof(awbLock), &awbLock);
3583    }
3584
3585    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3586        uint8_t fwk_effectMode =
3587            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3588        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3589                sizeof(EFFECT_MODES_MAP),
3590                fwk_effectMode);
3591        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3592                sizeof(effectMode), &effectMode);
3593    }
3594
3595    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3596        uint8_t colorCorrectMode =
3597            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3598        rc =
3599            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3600                    sizeof(colorCorrectMode), &colorCorrectMode);
3601    }
3602
3603    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3604        cam_color_correct_gains_t colorCorrectGains;
3605        for (int i = 0; i < 4; i++) {
3606            colorCorrectGains.gains[i] =
3607                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3608        }
3609        rc =
3610            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3611                    sizeof(colorCorrectGains), &colorCorrectGains);
3612    }
3613
3614    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3615        cam_color_correct_matrix_t colorCorrectTransform;
3616        cam_rational_type_t transform_elem;
3617        int num = 0;
3618        for (int i = 0; i < 3; i++) {
3619           for (int j = 0; j < 3; j++) {
3620              transform_elem.numerator =
3621                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3622              transform_elem.denominator =
3623                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3624              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3625              num++;
3626           }
3627        }
3628        rc =
3629            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3630                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3631    }
3632
3633    cam_trigger_t aecTrigger;
3634    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3635    aecTrigger.trigger_id = -1;
3636    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3637        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3638        aecTrigger.trigger =
3639            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3640        aecTrigger.trigger_id =
3641            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3642    }
3643    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3644                                sizeof(aecTrigger), &aecTrigger);
3645
3646    /*af_trigger must come with a trigger id*/
3647    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3648        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3649        cam_trigger_t af_trigger;
3650        af_trigger.trigger =
3651            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3652        af_trigger.trigger_id =
3653            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3654        rc = AddSetParmEntryToBatch(mParameters,
3655                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3656    }
3657
3658    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3659        int32_t demosaic =
3660            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3661        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3662                sizeof(demosaic), &demosaic);
3663    }
3664
3665    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3666        cam_edge_application_t edge_application;
3667        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3668        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3669            edge_application.sharpness = 0;
3670        } else {
3671            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3672                uint8_t edgeStrength =
3673                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
3674                edge_application.sharpness = (int32_t)edgeStrength;
3675            } else {
3676                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3677            }
3678        }
3679        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3680                sizeof(edge_application), &edge_application);
3681    }
3682
3683    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3684        int32_t respectFlashMode = 1;
3685        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3686            uint8_t fwk_aeMode =
3687                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3688            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3689                respectFlashMode = 0;
3690                ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
3691                    __func__);
3692            }
3693        }
3694        if (respectFlashMode) {
3695            uint8_t flashMode =
3696                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3697            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3698                                          sizeof(FLASH_MODES_MAP),
3699                                          flashMode);
3700            ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
3701            // To check: CAM_INTF_META_FLASH_MODE usage
3702            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3703                          sizeof(flashMode), &flashMode);
3704        }
3705    }
3706
3707    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3708        uint8_t flashPower =
3709            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3710        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3711                sizeof(flashPower), &flashPower);
3712    }
3713
3714    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3715        int64_t flashFiringTime =
3716            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3717        rc = AddSetParmEntryToBatch(mParameters,
3718                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3719    }
3720
3721    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3722        uint8_t geometricMode =
3723            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3724        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3725                sizeof(geometricMode), &geometricMode);
3726    }
3727
3728    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3729        uint8_t geometricStrength =
3730            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3731        rc = AddSetParmEntryToBatch(mParameters,
3732                CAM_INTF_META_GEOMETRIC_STRENGTH,
3733                sizeof(geometricStrength), &geometricStrength);
3734    }
3735
3736    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3737        uint8_t hotPixelMode =
3738            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3739        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3740                sizeof(hotPixelMode), &hotPixelMode);
3741    }
3742
3743    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3744        float lensAperture =
3745            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3746        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3747                sizeof(lensAperture), &lensAperture);
3748    }
3749
3750    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3751        float filterDensity =
3752            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3753        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3754                sizeof(filterDensity), &filterDensity);
3755    }
3756
3757    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3758        float focalLength =
3759            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3760        rc = AddSetParmEntryToBatch(mParameters,
3761                CAM_INTF_META_LENS_FOCAL_LENGTH,
3762                sizeof(focalLength), &focalLength);
3763    }
3764
3765    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3766        uint8_t optStabMode =
3767            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3768        rc = AddSetParmEntryToBatch(mParameters,
3769                CAM_INTF_META_LENS_OPT_STAB_MODE,
3770                sizeof(optStabMode), &optStabMode);
3771    }
3772
3773    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3774        uint8_t noiseRedMode =
3775            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3776        rc = AddSetParmEntryToBatch(mParameters,
3777                CAM_INTF_META_NOISE_REDUCTION_MODE,
3778                sizeof(noiseRedMode), &noiseRedMode);
3779    }
3780
3781    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3782        uint8_t noiseRedStrength =
3783            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3784        rc = AddSetParmEntryToBatch(mParameters,
3785                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3786                sizeof(noiseRedStrength), &noiseRedStrength);
3787    }
3788
3789    cam_crop_region_t scalerCropRegion;
3790    bool scalerCropSet = false;
3791    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3792        scalerCropRegion.left =
3793            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3794        scalerCropRegion.top =
3795            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3796        scalerCropRegion.width =
3797            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3798        scalerCropRegion.height =
3799            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3800        rc = AddSetParmEntryToBatch(mParameters,
3801                CAM_INTF_META_SCALER_CROP_REGION,
3802                sizeof(scalerCropRegion), &scalerCropRegion);
3803        scalerCropSet = true;
3804    }
3805
3806    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3807        int64_t sensorExpTime =
3808            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3809        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3810        rc = AddSetParmEntryToBatch(mParameters,
3811                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3812                sizeof(sensorExpTime), &sensorExpTime);
3813    }
3814
3815    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3816        int64_t sensorFrameDuration =
3817            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3818        int64_t minFrameDuration = getMinFrameDuration(request);
3819        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3820        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3821            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3822        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3823        rc = AddSetParmEntryToBatch(mParameters,
3824                CAM_INTF_META_SENSOR_FRAME_DURATION,
3825                sizeof(sensorFrameDuration), &sensorFrameDuration);
3826    }
3827
3828    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3829        int32_t sensorSensitivity =
3830            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3831        if (sensorSensitivity <
3832                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3833            sensorSensitivity =
3834                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3835        if (sensorSensitivity >
3836                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3837            sensorSensitivity =
3838                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3839        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3840        rc = AddSetParmEntryToBatch(mParameters,
3841                CAM_INTF_META_SENSOR_SENSITIVITY,
3842                sizeof(sensorSensitivity), &sensorSensitivity);
3843    }
3844
3845    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3846        int32_t shadingMode =
3847            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3848        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3849                sizeof(shadingMode), &shadingMode);
3850    }
3851
3852    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3853        uint8_t shadingStrength =
3854            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3855        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3856                sizeof(shadingStrength), &shadingStrength);
3857    }
3858
3859    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3860        uint8_t fwk_facedetectMode =
3861            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3862        uint8_t facedetectMode =
3863            lookupHalName(FACEDETECT_MODES_MAP,
3864                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3865        rc = AddSetParmEntryToBatch(mParameters,
3866                CAM_INTF_META_STATS_FACEDETECT_MODE,
3867                sizeof(facedetectMode), &facedetectMode);
3868    }
3869
3870    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3871        uint8_t histogramMode =
3872            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3873        rc = AddSetParmEntryToBatch(mParameters,
3874                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3875                sizeof(histogramMode), &histogramMode);
3876    }
3877
3878    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3879        uint8_t sharpnessMapMode =
3880            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3881        rc = AddSetParmEntryToBatch(mParameters,
3882                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3883                sizeof(sharpnessMapMode), &sharpnessMapMode);
3884    }
3885
3886    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3887        uint8_t tonemapMode =
3888            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3889        rc = AddSetParmEntryToBatch(mParameters,
3890                CAM_INTF_META_TONEMAP_MODE,
3891                sizeof(tonemapMode), &tonemapMode);
3892    }
3893    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3894    /*All tonemap channels will have the same number of points*/
3895    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3896        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3897        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3898        cam_rgb_tonemap_curves tonemapCurves;
3899        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3900
3901        /* ch0 = G*/
3902        int point = 0;
3903        cam_tonemap_curve_t tonemapCurveGreen;
3904        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3905            for (int j = 0; j < 2; j++) {
3906               tonemapCurveGreen.tonemap_points[i][j] =
3907                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3908               point++;
3909            }
3910        }
3911        tonemapCurves.curves[0] = tonemapCurveGreen;
3912
3913        /* ch 1 = B */
3914        point = 0;
3915        cam_tonemap_curve_t tonemapCurveBlue;
3916        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3917            for (int j = 0; j < 2; j++) {
3918               tonemapCurveBlue.tonemap_points[i][j] =
3919                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3920               point++;
3921            }
3922        }
3923        tonemapCurves.curves[1] = tonemapCurveBlue;
3924
3925        /* ch 2 = R */
3926        point = 0;
3927        cam_tonemap_curve_t tonemapCurveRed;
3928        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3929            for (int j = 0; j < 2; j++) {
3930               tonemapCurveRed.tonemap_points[i][j] =
3931                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3932               point++;
3933            }
3934        }
3935        tonemapCurves.curves[2] = tonemapCurveRed;
3936
3937        rc = AddSetParmEntryToBatch(mParameters,
3938                CAM_INTF_META_TONEMAP_CURVES,
3939                sizeof(tonemapCurves), &tonemapCurves);
3940    }
3941
3942    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3943        uint8_t captureIntent =
3944            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3945        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3946                sizeof(captureIntent), &captureIntent);
3947    }
3948
3949    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3950        uint8_t blackLevelLock =
3951            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3952        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3953                sizeof(blackLevelLock), &blackLevelLock);
3954    }
3955
3956    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3957        uint8_t lensShadingMapMode =
3958            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3959        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3960                sizeof(lensShadingMapMode), &lensShadingMapMode);
3961    }
3962
3963    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3964        cam_area_t roi;
3965        bool reset = true;
3966        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3967        if (scalerCropSet) {
3968            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3969        }
3970        if (reset) {
3971            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3972                    sizeof(roi), &roi);
3973        }
3974    }
3975
3976    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3977        cam_area_t roi;
3978        bool reset = true;
3979        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3980        if (scalerCropSet) {
3981            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3982        }
3983        if (reset) {
3984            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3985                    sizeof(roi), &roi);
3986        }
3987    }
3988
3989    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3990        cam_area_t roi;
3991        bool reset = true;
3992        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3993        if (scalerCropSet) {
3994            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3995        }
3996        if (reset) {
3997            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3998                    sizeof(roi), &roi);
3999        }
4000    }
4001    return rc;
4002}
4003
4004/*===========================================================================
4005 * FUNCTION   : getJpegSettings
4006 *
4007 * DESCRIPTION: save the jpeg settings in the HAL
4008 *
4009 *
4010 * PARAMETERS :
4011 *   @settings  : frame settings information from framework
4012 *
4013 *
4014 * RETURN     : success: NO_ERROR
4015 *              failure:
4016 *==========================================================================*/
4017int QCamera3HardwareInterface::getJpegSettings
4018                                  (const camera_metadata_t *settings)
4019{
4020    if (mJpegSettings) {
4021        if (mJpegSettings->gps_timestamp) {
4022            free(mJpegSettings->gps_timestamp);
4023            mJpegSettings->gps_timestamp = NULL;
4024        }
4025        if (mJpegSettings->gps_coordinates) {
4026            for (int i = 0; i < 3; i++) {
4027                free(mJpegSettings->gps_coordinates[i]);
4028                mJpegSettings->gps_coordinates[i] = NULL;
4029            }
4030        }
4031        free(mJpegSettings);
4032        mJpegSettings = NULL;
4033    }
4034    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
4035    CameraMetadata jpeg_settings;
4036    jpeg_settings = settings;
4037
4038    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
4039        mJpegSettings->jpeg_orientation =
4040            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
4041    } else {
4042        mJpegSettings->jpeg_orientation = 0;
4043    }
4044    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
4045        mJpegSettings->jpeg_quality =
4046            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
4047    } else {
4048        mJpegSettings->jpeg_quality = 85;
4049    }
4050    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
4051        mJpegSettings->thumbnail_size.width =
4052            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
4053        mJpegSettings->thumbnail_size.height =
4054            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
4055    } else {
4056        mJpegSettings->thumbnail_size.width = 0;
4057        mJpegSettings->thumbnail_size.height = 0;
4058    }
4059    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
4060        for (int i = 0; i < 3; i++) {
4061            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
4062            *(mJpegSettings->gps_coordinates[i]) =
4063                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
4064        }
4065    } else{
4066       for (int i = 0; i < 3; i++) {
4067            mJpegSettings->gps_coordinates[i] = NULL;
4068        }
4069    }
4070
4071    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
4072        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
4073        *(mJpegSettings->gps_timestamp) =
4074            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
4075    } else {
4076        mJpegSettings->gps_timestamp = NULL;
4077    }
4078
4079    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
4080        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
4081        for (int i = 0; i < len; i++) {
4082            mJpegSettings->gps_processing_method[i] =
4083                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
4084        }
4085        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
4086            mJpegSettings->gps_processing_method[len] = '\0';
4087        }
4088    } else {
4089        mJpegSettings->gps_processing_method[0] = '\0';
4090    }
4091
4092    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
4093        mJpegSettings->sensor_sensitivity =
4094            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
4095    } else {
4096        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
4097    }
4098
4099    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
4100
4101    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
4102        mJpegSettings->lens_focal_length =
4103            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
4104    }
4105    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4106        mJpegSettings->exposure_compensation =
4107            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4108    }
4109    mJpegSettings->sharpness = 10; //default value
4110    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
4111        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
4112        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
4113            mJpegSettings->sharpness = 0;
4114        }
4115    }
4116    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
4117    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
4118    mJpegSettings->is_jpeg_format = true;
4119    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
4120    return 0;
4121}
4122
4123/*===========================================================================
4124 * FUNCTION   : captureResultCb
4125 *
4126 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
4127 *
4128 * PARAMETERS :
4129 *   @frame  : frame information from mm-camera-interface
4130 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
4131 *   @userdata: userdata
4132 *
4133 * RETURN     : NONE
4134 *==========================================================================*/
4135void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
4136                camera3_stream_buffer_t *buffer,
4137                uint32_t frame_number, void *userdata)
4138{
4139    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
4140    if (hw == NULL) {
4141        ALOGE("%s: Invalid hw %p", __func__, hw);
4142        return;
4143    }
4144
4145    hw->captureResultCb(metadata, buffer, frame_number);
4146    return;
4147}
4148
4149
4150/*===========================================================================
4151 * FUNCTION   : initialize
4152 *
4153 * DESCRIPTION: Pass framework callback pointers to HAL
4154 *
4155 * PARAMETERS :
4156 *
4157 *
4158 * RETURN     : Success : 0
4159 *              Failure: -ENODEV
4160 *==========================================================================*/
4161
4162int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
4163                                  const camera3_callback_ops_t *callback_ops)
4164{
4165    ALOGV("%s: E", __func__);
4166    QCamera3HardwareInterface *hw =
4167        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4168    if (!hw) {
4169        ALOGE("%s: NULL camera device", __func__);
4170        return -ENODEV;
4171    }
4172
4173    int rc = hw->initialize(callback_ops);
4174    ALOGV("%s: X", __func__);
4175    return rc;
4176}
4177
4178/*===========================================================================
4179 * FUNCTION   : configure_streams
4180 *
4181 * DESCRIPTION:
4182 *
4183 * PARAMETERS :
4184 *
4185 *
4186 * RETURN     : Success: 0
4187 *              Failure: -EINVAL (if stream configuration is invalid)
4188 *                       -ENODEV (fatal error)
4189 *==========================================================================*/
4190
4191int QCamera3HardwareInterface::configure_streams(
4192        const struct camera3_device *device,
4193        camera3_stream_configuration_t *stream_list)
4194{
4195    ALOGV("%s: E", __func__);
4196    QCamera3HardwareInterface *hw =
4197        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4198    if (!hw) {
4199        ALOGE("%s: NULL camera device", __func__);
4200        return -ENODEV;
4201    }
4202    int rc = hw->configureStreams(stream_list);
4203    ALOGV("%s: X", __func__);
4204    return rc;
4205}
4206
4207/*===========================================================================
4208 * FUNCTION   : register_stream_buffers
4209 *
4210 * DESCRIPTION: Register stream buffers with the device
4211 *
4212 * PARAMETERS :
4213 *
4214 * RETURN     :
4215 *==========================================================================*/
4216int QCamera3HardwareInterface::register_stream_buffers(
4217        const struct camera3_device *device,
4218        const camera3_stream_buffer_set_t *buffer_set)
4219{
4220    ALOGV("%s: E", __func__);
4221    QCamera3HardwareInterface *hw =
4222        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4223    if (!hw) {
4224        ALOGE("%s: NULL camera device", __func__);
4225        return -ENODEV;
4226    }
4227    int rc = hw->registerStreamBuffers(buffer_set);
4228    ALOGV("%s: X", __func__);
4229    return rc;
4230}
4231
4232/*===========================================================================
4233 * FUNCTION   : construct_default_request_settings
4234 *
4235 * DESCRIPTION: Configure a settings buffer to meet the required use case
4236 *
4237 * PARAMETERS :
4238 *
4239 *
4240 * RETURN     : Success: Return valid metadata
4241 *              Failure: Return NULL
4242 *==========================================================================*/
4243const camera_metadata_t* QCamera3HardwareInterface::
4244    construct_default_request_settings(const struct camera3_device *device,
4245                                        int type)
4246{
4247
4248    ALOGV("%s: E", __func__);
4249    camera_metadata_t* fwk_metadata = NULL;
4250    QCamera3HardwareInterface *hw =
4251        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4252    if (!hw) {
4253        ALOGE("%s: NULL camera device", __func__);
4254        return NULL;
4255    }
4256
4257    fwk_metadata = hw->translateCapabilityToMetadata(type);
4258
4259    ALOGV("%s: X", __func__);
4260    return fwk_metadata;
4261}
4262
4263/*===========================================================================
4264 * FUNCTION   : process_capture_request
4265 *
4266 * DESCRIPTION:
4267 *
4268 * PARAMETERS :
4269 *
4270 *
4271 * RETURN     :
4272 *==========================================================================*/
4273int QCamera3HardwareInterface::process_capture_request(
4274                    const struct camera3_device *device,
4275                    camera3_capture_request_t *request)
4276{
4277    ALOGV("%s: E", __func__);
4278    QCamera3HardwareInterface *hw =
4279        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4280    if (!hw) {
4281        ALOGE("%s: NULL camera device", __func__);
4282        return -EINVAL;
4283    }
4284
4285    int rc = hw->processCaptureRequest(request);
4286    ALOGV("%s: X", __func__);
4287    return rc;
4288}
4289
4290/*===========================================================================
4291 * FUNCTION   : get_metadata_vendor_tag_ops
4292 *
4293 * DESCRIPTION:
4294 *
4295 * PARAMETERS :
4296 *
4297 *
4298 * RETURN     :
4299 *==========================================================================*/
4300
4301void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
4302                const struct camera3_device *device,
4303                vendor_tag_query_ops_t* ops)
4304{
4305    ALOGV("%s: E", __func__);
4306    QCamera3HardwareInterface *hw =
4307        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4308    if (!hw) {
4309        ALOGE("%s: NULL camera device", __func__);
4310        return;
4311    }
4312
4313    hw->getMetadataVendorTagOps(ops);
4314    ALOGV("%s: X", __func__);
4315    return;
4316}
4317
4318/*===========================================================================
4319 * FUNCTION   : dump
4320 *
4321 * DESCRIPTION:
4322 *
4323 * PARAMETERS :
4324 *
4325 *
4326 * RETURN     :
4327 *==========================================================================*/
4328
4329void QCamera3HardwareInterface::dump(
4330                const struct camera3_device *device, int fd)
4331{
4332    ALOGV("%s: E", __func__);
4333    QCamera3HardwareInterface *hw =
4334        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4335    if (!hw) {
4336        ALOGE("%s: NULL camera device", __func__);
4337        return;
4338    }
4339
4340    hw->dump(fd);
4341    ALOGV("%s: X", __func__);
4342    return;
4343}
4344
4345/*===========================================================================
4346 * FUNCTION   : flush
4347 *
4348 * DESCRIPTION:
4349 *
4350 * PARAMETERS :
4351 *
4352 *
4353 * RETURN     :
4354 *==========================================================================*/
4355
4356int QCamera3HardwareInterface::flush(
4357                const struct camera3_device *device)
4358{
4359    int rc;
4360    ALOGV("%s: E", __func__);
4361    QCamera3HardwareInterface *hw =
4362        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4363    if (!hw) {
4364        ALOGE("%s: NULL camera device", __func__);
4365        return -EINVAL;
4366    }
4367
4368    rc = hw->flush();
4369    ALOGV("%s: X", __func__);
4370    return rc;
4371}
4372
4373/*===========================================================================
4374 * FUNCTION   : close_camera_device
4375 *
4376 * DESCRIPTION:
4377 *
4378 * PARAMETERS :
4379 *
4380 *
4381 * RETURN     :
4382 *==========================================================================*/
4383int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4384{
4385    ALOGV("%s: E", __func__);
4386    int ret = NO_ERROR;
4387    QCamera3HardwareInterface *hw =
4388        reinterpret_cast<QCamera3HardwareInterface *>(
4389            reinterpret_cast<camera3_device_t *>(device)->priv);
4390    if (!hw) {
4391        ALOGE("NULL camera device");
4392        return BAD_VALUE;
4393    }
4394    delete hw;
4395
4396    pthread_mutex_lock(&mCameraSessionLock);
4397    mCameraSessionActive = 0;
4398    pthread_mutex_unlock(&mCameraSessionLock);
4399    ALOGV("%s: X", __func__);
4400    return ret;
4401}
4402
4403/*===========================================================================
4404 * FUNCTION   : getWaveletDenoiseProcessPlate
4405 *
4406 * DESCRIPTION: query wavelet denoise process plate
4407 *
4408 * PARAMETERS : None
4409 *
4410 * RETURN     : WNR prcocess plate vlaue
4411 *==========================================================================*/
4412cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4413{
4414    char prop[PROPERTY_VALUE_MAX];
4415    memset(prop, 0, sizeof(prop));
4416    property_get("persist.denoise.process.plates", prop, "0");
4417    int processPlate = atoi(prop);
4418    switch(processPlate) {
4419    case 0:
4420        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4421    case 1:
4422        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4423    case 2:
4424        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4425    case 3:
4426        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4427    default:
4428        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4429    }
4430}
4431
4432/*===========================================================================
4433 * FUNCTION   : needRotationReprocess
4434 *
4435 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4436 *
4437 * PARAMETERS : none
4438 *
4439 * RETURN     : true: needed
4440 *              false: no need
4441 *==========================================================================*/
4442bool QCamera3HardwareInterface::needRotationReprocess()
4443{
4444
4445    if (!mJpegSettings->is_jpeg_format) {
4446        // RAW image, no need to reprocess
4447        return false;
4448    }
4449
4450    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4451        mJpegSettings->jpeg_orientation > 0) {
4452        // current rotation is not zero, and pp has the capability to process rotation
4453        ALOGD("%s: need do reprocess for rotation", __func__);
4454        return true;
4455    }
4456
4457    return false;
4458}
4459
4460/*===========================================================================
4461 * FUNCTION   : needReprocess
4462 *
4463 * DESCRIPTION: if reprocess in needed
4464 *
4465 * PARAMETERS : none
4466 *
4467 * RETURN     : true: needed
4468 *              false: no need
4469 *==========================================================================*/
4470bool QCamera3HardwareInterface::needReprocess()
4471{
4472    if (!mJpegSettings->is_jpeg_format) {
4473        // RAW image, no need to reprocess
4474        return false;
4475    }
4476
4477    if ((mJpegSettings->min_required_pp_mask > 0) ||
4478         isWNREnabled()) {
4479        // TODO: add for ZSL HDR later
4480        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4481        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4482        return true;
4483    }
4484    return needRotationReprocess();
4485}
4486
4487/*===========================================================================
4488 * FUNCTION   : addOnlineReprocChannel
4489 *
4490 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4491 *              coming from input channel
4492 *
4493 * PARAMETERS :
4494 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4495 *
4496 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4497 *==========================================================================*/
4498QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4499              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4500{
4501    int32_t rc = NO_ERROR;
4502    QCamera3ReprocessChannel *pChannel = NULL;
4503    if (pInputChannel == NULL) {
4504        ALOGE("%s: input channel obj is NULL", __func__);
4505        return NULL;
4506    }
4507
4508    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4509            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4510    if (NULL == pChannel) {
4511        ALOGE("%s: no mem for reprocess channel", __func__);
4512        return NULL;
4513    }
4514
4515    // Capture channel, only need snapshot and postview streams start together
4516    mm_camera_channel_attr_t attr;
4517    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4518    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4519    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4520    rc = pChannel->initialize();
4521    if (rc != NO_ERROR) {
4522        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4523        delete pChannel;
4524        return NULL;
4525    }
4526
4527    // pp feature config
4528    cam_pp_feature_config_t pp_config;
4529    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4530    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4531        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4532        pp_config.sharpness = mJpegSettings->sharpness;
4533    }
4534
4535    if (isWNREnabled()) {
4536        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4537        pp_config.denoise2d.denoise_enable = 1;
4538        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4539    }
4540    if (needRotationReprocess()) {
4541        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4542        int rotation = mJpegSettings->jpeg_orientation;
4543        if (rotation == 0) {
4544            pp_config.rotation = ROTATE_0;
4545        } else if (rotation == 90) {
4546            pp_config.rotation = ROTATE_90;
4547        } else if (rotation == 180) {
4548            pp_config.rotation = ROTATE_180;
4549        } else if (rotation == 270) {
4550            pp_config.rotation = ROTATE_270;
4551        }
4552    }
4553
4554   rc = pChannel->addReprocStreamsFromSource(pp_config,
4555                                             pInputChannel,
4556                                             mMetadataChannel);
4557
4558    if (rc != NO_ERROR) {
4559        delete pChannel;
4560        return NULL;
4561    }
4562    return pChannel;
4563}
4564
4565int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
4566{
4567    return gCamCapability[mCameraId]->min_num_pp_bufs;
4568}
4569
4570bool QCamera3HardwareInterface::isWNREnabled() {
4571    return gCamCapability[mCameraId]->isWnrSupported;
4572}
4573
4574}; //end namespace qcamera
4575