QCamera3HWI.cpp revision 0b11b27dc2c62f8fa8523ef8706d1cc61b3ea01e
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
/* Larger of two values. Classic macro: arguments are evaluated twice,
 * so do not pass expressions with side effects. */
#define MAX(a, b) ((a) > (b) ? (a) : (b))

/* Shorthand for fetching the mapped data pointer of buffer INDEX from a
 * QCamera3 memory object. */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability records; indexed by camera id (see constructor use).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Previously applied parameter batch. NOTE(review): not referenced in this
// chunk — presumably maintained by the parameter-translation code elsewhere.
parm_buffer_t *prevSettings;
// Cached static metadata blobs, one per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes session open/close; guards mCameraSessionActive below.
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
// Non-zero while a camera session is open; only one simultaneous session
// is supported (see openCamera(hw_device)).
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// The QCameraMap tables below translate Android framework control enums
// (ANDROID_*) into the backend's cam_* enums. Lookup helpers presumably scan
// these arrays linearly; order is not significant.

// android.control.effectMode -> backend effect mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// android.control.awbMode -> backend white-balance mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// android.control.sceneMode -> backend scene mode.
// Note STEADYPHOTO intentionally maps to the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// android.control.afMode -> backend focus mode.
// AF_MODE_OFF maps to FIXED focus (no backend "off" equivalent).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// android.control.aeAntibandingMode -> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// android.control.aeMode -> backend flash behavior.
// Both plain AE_MODE_ON and AE_MODE_OFF disable flash; REDEYE falls back to
// plain AUTO flash (no dedicated backend redeye mode in this table).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// android.flash.mode -> backend flash mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// android.statistics.faceDetectMode -> backend face-detect mode.
// SIMPLE is not listed, so only OFF and FULL are supported here.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the trailing
// (0, 0) entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
// camera3_device_ops vtable handed to the framework via mCameraDevice.ops.
// Uses GNU designated-initializer syntax ("label:") to bind each HAL3 entry
// point to the matching static trampoline, which recovers the instance from
// camera3_device_t::priv.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the hw_device_t header the framework uses to talk to this HAL.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    // priv carries `this` so the static ops trampolines can find the instance.
    mCameraDevice.priv = this;
    // NOTE: assumes gCamCapability[cameraId] was populated before construction.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request-tracking synchronization primitives (used with mMutex).
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL is optional; a missing module is logged but not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if (channel)
240            delete channel;
241        free (*it);
242    }
243
244    mPictureChannel = NULL;
245
246    if (mJpegSettings != NULL) {
247        free(mJpegSettings);
248        mJpegSettings = NULL;
249    }
250
251    /* Clean up all channels */
252    if (mCameraInitialized) {
253        mMetadataChannel->stop();
254        delete mMetadataChannel;
255        mMetadataChannel = NULL;
256        deinitParameters();
257    }
258
259    if (mCameraOpened)
260        closeCamera();
261
262    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
263        if (mDefaultMetadata[i])
264            free_camera_metadata(mDefaultMetadata[i]);
265
266    pthread_cond_destroy(&mRequestCond);
267
268    pthread_mutex_destroy(&mMutex);
269    ALOGV("%s: X", __func__);
270}
271
272/*===========================================================================
273 * FUNCTION   : openCamera
274 *
275 * DESCRIPTION: open camera
276 *
277 * PARAMETERS :
278 *   @hw_device  : double ptr for camera device struct
279 *
280 * RETURN     : int32_t type of status
281 *              NO_ERROR  -- success
282 *              none-zero failure code
283 *==========================================================================*/
284int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
285{
286    int rc = 0;
287    pthread_mutex_lock(&mCameraSessionLock);
288    if (mCameraSessionActive) {
289        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
290        pthread_mutex_unlock(&mCameraSessionLock);
291        return INVALID_OPERATION;
292    }
293
294    if (mCameraOpened) {
295        *hw_device = NULL;
296        return PERMISSION_DENIED;
297    }
298
299    rc = openCamera();
300    if (rc == 0) {
301        *hw_device = &mCameraDevice.common;
302        mCameraSessionActive = 1;
303    } else
304        *hw_device = NULL;
305
306#ifdef HAS_MULTIMEDIA_HINTS
307    if (rc == 0) {
308        if (m_pPowerModule) {
309            if (m_pPowerModule->powerHint) {
310                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
311                        (void *)"state=1");
312            }
313        }
314    }
315#endif
316    pthread_mutex_unlock(&mCameraSessionLock);
317    return rc;
318}
319
320/*===========================================================================
321 * FUNCTION   : openCamera
322 *
323 * DESCRIPTION: open camera
324 *
325 * PARAMETERS : none
326 *
327 * RETURN     : int32_t type of status
328 *              NO_ERROR  -- success
329 *              none-zero failure code
330 *==========================================================================*/
331int QCamera3HardwareInterface::openCamera()
332{
333    if (mCameraHandle) {
334        ALOGE("Failure: Camera already opened");
335        return ALREADY_EXISTS;
336    }
337    mCameraHandle = camera_open(mCameraId);
338    if (!mCameraHandle) {
339        ALOGE("camera_open failed.");
340        return UNKNOWN_ERROR;
341    }
342
343    mCameraOpened = true;
344
345    return NO_ERROR;
346}
347
348/*===========================================================================
349 * FUNCTION   : closeCamera
350 *
351 * DESCRIPTION: close camera
352 *
353 * PARAMETERS : none
354 *
355 * RETURN     : int32_t type of status
356 *              NO_ERROR  -- success
357 *              none-zero failure code
358 *==========================================================================*/
359int QCamera3HardwareInterface::closeCamera()
360{
361    int rc = NO_ERROR;
362
363    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
364    mCameraHandle = NULL;
365    mCameraOpened = false;
366
367#ifdef HAS_MULTIMEDIA_HINTS
368    if (rc == NO_ERROR) {
369        if (m_pPowerModule) {
370            if (m_pPowerModule->powerHint) {
371                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
372                        (void *)"state=0");
373            }
374        }
375    }
376#endif
377
378    return rc;
379}
380
381/*===========================================================================
382 * FUNCTION   : initialize
383 *
384 * DESCRIPTION: Initialize frameworks callback functions
385 *
386 * PARAMETERS :
387 *   @callback_ops : callback function to frameworks
388 *
389 * RETURN     :
390 *
391 *==========================================================================*/
392int QCamera3HardwareInterface::initialize(
393        const struct camera3_callback_ops *callback_ops)
394{
395    int rc;
396
397    pthread_mutex_lock(&mMutex);
398
399    rc = initParameters();
400    if (rc < 0) {
401        ALOGE("%s: initParamters failed %d", __func__, rc);
402       goto err1;
403    }
404    mCallbackOps = callback_ops;
405
406    pthread_mutex_unlock(&mMutex);
407    mCameraInitialized = true;
408    return 0;
409
410err1:
411    pthread_mutex_unlock(&mMutex);
412    return rc;
413}
414
415/*===========================================================================
416 * FUNCTION   : configureStreams
417 *
418 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
419 *              and output streams.
420 *
421 * PARAMETERS :
422 *   @stream_list : streams to be configured
423 *
424 * RETURN     :
425 *
426 *==========================================================================*/
427int QCamera3HardwareInterface::configureStreams(
428        camera3_stream_configuration_t *streamList)
429{
430    int rc = 0;
431    mIsZslMode = false;
432
433    // Sanity check stream_list
434    if (streamList == NULL) {
435        ALOGE("%s: NULL stream configuration", __func__);
436        return BAD_VALUE;
437    }
438    if (streamList->streams == NULL) {
439        ALOGE("%s: NULL stream list", __func__);
440        return BAD_VALUE;
441    }
442
443    if (streamList->num_streams < 1) {
444        ALOGE("%s: Bad number of streams requested: %d", __func__,
445                streamList->num_streams);
446        return BAD_VALUE;
447    }
448
449    /* first invalidate all the steams in the mStreamList
450     * if they appear again, they will be validated */
451    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
452            it != mStreamInfo.end(); it++) {
453        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
454        channel->stop();
455        (*it)->status = INVALID;
456    }
457    if (mMetadataChannel) {
458        /* If content of mStreamInfo is not 0, there is metadata stream */
459        mMetadataChannel->stop();
460    }
461
462    pthread_mutex_lock(&mMutex);
463
464    camera3_stream_t *inputStream = NULL;
465    camera3_stream_t *jpegStream = NULL;
466    cam_stream_size_info_t stream_config_info;
467
468    for (size_t i = 0; i < streamList->num_streams; i++) {
469        camera3_stream_t *newStream = streamList->streams[i];
470        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
471                __func__, newStream->stream_type, newStream->format,
472                 newStream->width, newStream->height);
473        //if the stream is in the mStreamList validate it
474        bool stream_exists = false;
475        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
476                it != mStreamInfo.end(); it++) {
477            if ((*it)->stream == newStream) {
478                QCamera3Channel *channel =
479                    (QCamera3Channel*)(*it)->stream->priv;
480                stream_exists = true;
481                (*it)->status = RECONFIGURE;
482                /*delete the channel object associated with the stream because
483                  we need to reconfigure*/
484                delete channel;
485                (*it)->stream->priv = NULL;
486            }
487        }
488        if (!stream_exists) {
489            //new stream
490            stream_info_t* stream_info;
491            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
492            stream_info->stream = newStream;
493            stream_info->status = VALID;
494            stream_info->registered = 0;
495            mStreamInfo.push_back(stream_info);
496        }
497        if (newStream->stream_type == CAMERA3_STREAM_INPUT
498                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
499            if (inputStream != NULL) {
500                ALOGE("%s: Multiple input streams requested!", __func__);
501                pthread_mutex_unlock(&mMutex);
502                return BAD_VALUE;
503            }
504            inputStream = newStream;
505        }
506        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
507            jpegStream = newStream;
508        }
509    }
510    mInputStream = inputStream;
511
512    /*clean up invalid streams*/
513    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
514            it != mStreamInfo.end();) {
515        if(((*it)->status) == INVALID){
516            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
517            delete channel;
518            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
519            free(*it);
520            it = mStreamInfo.erase(it);
521        } else {
522            it++;
523        }
524    }
525    if (mMetadataChannel) {
526        delete mMetadataChannel;
527        mMetadataChannel = NULL;
528    }
529
530    //Create metadata channel and initialize it
531    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
532                    mCameraHandle->ops, captureResultCb,
533                    &gCamCapability[mCameraId]->padding_info, this);
534    if (mMetadataChannel == NULL) {
535        ALOGE("%s: failed to allocate metadata channel", __func__);
536        rc = -ENOMEM;
537        pthread_mutex_unlock(&mMutex);
538        return rc;
539    }
540    rc = mMetadataChannel->initialize();
541    if (rc < 0) {
542        ALOGE("%s: metadata channel initialization failed", __func__);
543        delete mMetadataChannel;
544        pthread_mutex_unlock(&mMutex);
545        return rc;
546    }
547
548    /* Allocate channel objects for the requested streams */
549    for (size_t i = 0; i < streamList->num_streams; i++) {
550        camera3_stream_t *newStream = streamList->streams[i];
551        uint32_t stream_usage = newStream->usage;
552        stream_config_info.stream_sizes[i].width = newStream->width;
553        stream_config_info.stream_sizes[i].height = newStream->height;
554        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
555            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
556            //for zsl stream the size is jpeg size
557            stream_config_info.stream_sizes[i].width = jpegStream->width;
558            stream_config_info.stream_sizes[i].height = jpegStream->height;
559            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
560        } else {
561           //for non zsl streams find out the format
562           switch (newStream->format) {
563           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
564              {
565                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
566                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
567                 } else {
568                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
569                 }
570              }
571              break;
572           case HAL_PIXEL_FORMAT_YCbCr_420_888:
573              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
574              break;
575           case HAL_PIXEL_FORMAT_BLOB:
576              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
577              break;
578           default:
579              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
580              break;
581           }
582        }
583        if (newStream->priv == NULL) {
584            //New stream, construct channel
585            switch (newStream->stream_type) {
586            case CAMERA3_STREAM_INPUT:
587                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
588                break;
589            case CAMERA3_STREAM_BIDIRECTIONAL:
590                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
591                    GRALLOC_USAGE_HW_CAMERA_WRITE;
592                break;
593            case CAMERA3_STREAM_OUTPUT:
594                /* For video encoding stream, set read/write rarely
595                 * flag so that they may be set to un-cached */
596                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
597                    newStream->usage =
598                         (GRALLOC_USAGE_SW_READ_RARELY |
599                         GRALLOC_USAGE_SW_WRITE_RARELY |
600                         GRALLOC_USAGE_HW_CAMERA_WRITE);
601                else
602                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
603                break;
604            default:
605                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
606                break;
607            }
608
609            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
610                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
611                QCamera3Channel *channel;
612                switch (newStream->format) {
613                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
614                case HAL_PIXEL_FORMAT_YCbCr_420_888:
615                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
616                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
617                        jpegStream) {
618                        uint32_t width = jpegStream->width;
619                        uint32_t height = jpegStream->height;
620                        mIsZslMode = true;
621                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
622                            mCameraHandle->ops, captureResultCb,
623                            &gCamCapability[mCameraId]->padding_info, this, newStream,
624                            width, height);
625                    } else
626                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
627                            mCameraHandle->ops, captureResultCb,
628                            &gCamCapability[mCameraId]->padding_info, this, newStream);
629                    if (channel == NULL) {
630                        ALOGE("%s: allocation of channel failed", __func__);
631                        pthread_mutex_unlock(&mMutex);
632                        return -ENOMEM;
633                    }
634
635                    newStream->priv = channel;
636                    break;
637                case HAL_PIXEL_FORMAT_BLOB:
638                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
639                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
640                            mCameraHandle->ops, captureResultCb,
641                            &gCamCapability[mCameraId]->padding_info, this, newStream);
642                    if (mPictureChannel == NULL) {
643                        ALOGE("%s: allocation of channel failed", __func__);
644                        pthread_mutex_unlock(&mMutex);
645                        return -ENOMEM;
646                    }
647                    newStream->priv = (QCamera3Channel*)mPictureChannel;
648                    break;
649
650                //TODO: Add support for app consumed format?
651                default:
652                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
653                    break;
654                }
655            }
656        } else {
657            // Channel already exists for this stream
658            // Do nothing for now
659        }
660    }
661    /*For the streams to be reconfigured we need to register the buffers
662      since the framework wont*/
663    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
664            it != mStreamInfo.end(); it++) {
665        if ((*it)->status == RECONFIGURE) {
666            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
667            /*only register buffers for streams that have already been
668              registered*/
669            if ((*it)->registered) {
670                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
671                        (*it)->buffer_set.buffers);
672                if (rc != NO_ERROR) {
673                    ALOGE("%s: Failed to register the buffers of old stream,\
674                            rc = %d", __func__, rc);
675                }
676                ALOGV("%s: channel %p has %d buffers",
677                        __func__, channel, (*it)->buffer_set.num_buffers);
678            }
679        }
680
681        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
682        if (index == NAME_NOT_FOUND) {
683            mPendingBuffersMap.add((*it)->stream, 0);
684        } else {
685            mPendingBuffersMap.editValueAt(index) = 0;
686        }
687    }
688
689    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
690    mPendingRequestsList.clear();
691
692    /*flush the metadata list*/
693    if (!mStoredMetadataList.empty()) {
694        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
695              m != mStoredMetadataList.end(); m++) {
696            mMetadataChannel->bufDone(m->meta_buf);
697            free(m->meta_buf);
698            m = mStoredMetadataList.erase(m);
699        }
700    }
701    int32_t hal_version = CAM_HAL_V3;
702    stream_config_info.num_streams = streamList->num_streams;
703
704    //settings/parameters don't carry over for new configureStreams
705    memset(mParameters, 0, sizeof(parm_buffer_t));
706
707    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
708    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
709                sizeof(hal_version), &hal_version);
710
711    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
712                sizeof(stream_config_info), &stream_config_info);
713
714    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
715
716    mFirstRequest = true;
717
718    //Get min frame duration for this streams configuration
719    deriveMinFrameDuration();
720
721    pthread_mutex_unlock(&mMutex);
722    return rc;
723}
724
725/*===========================================================================
726 * FUNCTION   : validateCaptureRequest
727 *
728 * DESCRIPTION: validate a capture request from camera service
729 *
730 * PARAMETERS :
731 *   @request : request from framework to process
732 *
733 * RETURN     :
734 *
735 *==========================================================================*/
736int QCamera3HardwareInterface::validateCaptureRequest(
737                    camera3_capture_request_t *request)
738{
739    ssize_t idx = 0;
740    const camera3_stream_buffer_t *b;
741    CameraMetadata meta;
742
743    /* Sanity check the request */
744    if (request == NULL) {
745        ALOGE("%s: NULL capture request", __func__);
746        return BAD_VALUE;
747    }
748
749    uint32_t frameNumber = request->frame_number;
750    if (request->input_buffer != NULL &&
751            request->input_buffer->stream != mInputStream) {
752        ALOGE("%s: Request %d: Input buffer not from input stream!",
753                __FUNCTION__, frameNumber);
754        return BAD_VALUE;
755    }
756    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
757        ALOGE("%s: Request %d: No output buffers provided!",
758                __FUNCTION__, frameNumber);
759        return BAD_VALUE;
760    }
761    if (request->input_buffer != NULL) {
762        b = request->input_buffer;
763        QCamera3Channel *channel =
764            static_cast<QCamera3Channel*>(b->stream->priv);
765        if (channel == NULL) {
766            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
767                    __func__, frameNumber, idx);
768            return BAD_VALUE;
769        }
770        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
771            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
772                    __func__, frameNumber, idx);
773            return BAD_VALUE;
774        }
775        if (b->release_fence != -1) {
776            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
777                    __func__, frameNumber, idx);
778            return BAD_VALUE;
779        }
780        if (b->buffer == NULL) {
781            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
782                    __func__, frameNumber, idx);
783            return BAD_VALUE;
784        }
785    }
786
787    // Validate all buffers
788    b = request->output_buffers;
789    do {
790        QCamera3Channel *channel =
791                static_cast<QCamera3Channel*>(b->stream->priv);
792        if (channel == NULL) {
793            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
794                    __func__, frameNumber, idx);
795            return BAD_VALUE;
796        }
797        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
798            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
799                    __func__, frameNumber, idx);
800            return BAD_VALUE;
801        }
802        if (b->release_fence != -1) {
803            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
804                    __func__, frameNumber, idx);
805            return BAD_VALUE;
806        }
807        if (b->buffer == NULL) {
808            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
809                    __func__, frameNumber, idx);
810            return BAD_VALUE;
811        }
812        idx++;
813        b = request->output_buffers + idx;
814    } while (idx < (ssize_t)request->num_output_buffers);
815
816    return NO_ERROR;
817}
818
819/*===========================================================================
820 * FUNCTION   : deriveMinFrameDuration
821 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
823 *              on currently configured streams.
824 *
825 * PARAMETERS : NONE
826 *
827 * RETURN     : NONE
828 *
829 *==========================================================================*/
830void QCamera3HardwareInterface::deriveMinFrameDuration()
831{
832    int32_t maxJpegDimension, maxProcessedDimension;
833
834    maxJpegDimension = 0;
835    maxProcessedDimension = 0;
836
837    // Figure out maximum jpeg, processed, and raw dimensions
838    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
839        it != mStreamInfo.end(); it++) {
840
841        // Input stream doesn't have valid stream_type
842        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
843            continue;
844
845        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
846        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
847            if (dimension > maxJpegDimension)
848                maxJpegDimension = dimension;
849        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
850            if (dimension > maxProcessedDimension)
851                maxProcessedDimension = dimension;
852        }
853    }
854
855    //Assume all jpeg dimensions are in processed dimensions.
856    if (maxJpegDimension > maxProcessedDimension)
857        maxProcessedDimension = maxJpegDimension;
858
859    //Find minimum durations for processed, jpeg, and raw
860    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
861    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
862        if (maxProcessedDimension ==
863            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
864            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
865            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
866            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
867            break;
868        }
869    }
870}
871
872/*===========================================================================
873 * FUNCTION   : getMinFrameDuration
874 *
 * DESCRIPTION: get minimum frame duration based on the currently derived
 *              minimum frame durations and the current request configuration.
877 *
 * PARAMETERS : @request: request sent by the frameworks
879 *
 * RETURN     : min frame duration for a particular request
881 *
882 *==========================================================================*/
883int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
884{
885    bool hasJpegStream = false;
886    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
887        const camera3_stream_t *stream = request->output_buffers[i].stream;
888        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
889            hasJpegStream = true;
890    }
891
892    if (!hasJpegStream)
893        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
894    else
895        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
896}
897
898/*===========================================================================
899 * FUNCTION   : registerStreamBuffers
900 *
901 * DESCRIPTION: Register buffers for a given stream with the HAL device.
902 *
903 * PARAMETERS :
904 *   @stream_list : streams to be configured
905 *
906 * RETURN     :
907 *
908 *==========================================================================*/
909int QCamera3HardwareInterface::registerStreamBuffers(
910        const camera3_stream_buffer_set_t *buffer_set)
911{
912    int rc = 0;
913
914    pthread_mutex_lock(&mMutex);
915
916    if (buffer_set == NULL) {
917        ALOGE("%s: Invalid buffer_set parameter.", __func__);
918        pthread_mutex_unlock(&mMutex);
919        return -EINVAL;
920    }
921    if (buffer_set->stream == NULL) {
922        ALOGE("%s: Invalid stream parameter.", __func__);
923        pthread_mutex_unlock(&mMutex);
924        return -EINVAL;
925    }
926    if (buffer_set->num_buffers < 1) {
927        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
928        pthread_mutex_unlock(&mMutex);
929        return -EINVAL;
930    }
931    if (buffer_set->buffers == NULL) {
932        ALOGE("%s: Invalid buffers parameter.", __func__);
933        pthread_mutex_unlock(&mMutex);
934        return -EINVAL;
935    }
936
937    camera3_stream_t *stream = buffer_set->stream;
938    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
939
940    //set the buffer_set in the mStreamInfo array
941    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
942            it != mStreamInfo.end(); it++) {
943        if ((*it)->stream == stream) {
944            uint32_t numBuffers = buffer_set->num_buffers;
945            (*it)->buffer_set.stream = buffer_set->stream;
946            (*it)->buffer_set.num_buffers = numBuffers;
947            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
948            if ((*it)->buffer_set.buffers == NULL) {
949                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
950                pthread_mutex_unlock(&mMutex);
951                return -ENOMEM;
952            }
953            for (size_t j = 0; j < numBuffers; j++){
954                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
955            }
956            (*it)->registered = 1;
957        }
958    }
959    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
960    if (rc < 0) {
961        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
962        pthread_mutex_unlock(&mMutex);
963        return -ENODEV;
964    }
965
966    pthread_mutex_unlock(&mMutex);
967    return NO_ERROR;
968}
969
970/*===========================================================================
971 * FUNCTION   : processCaptureRequest
972 *
973 * DESCRIPTION: process a capture request from camera service
974 *
975 * PARAMETERS :
976 *   @request : request from framework to process
977 *
978 * RETURN     :
979 *
980 *==========================================================================*/
981int QCamera3HardwareInterface::processCaptureRequest(
982                    camera3_capture_request_t *request)
983{
984    int rc = NO_ERROR;
985    int32_t request_id;
986    CameraMetadata meta;
987    MetadataBufferInfo reproc_meta;
988    int queueMetadata = 0;
989
990    pthread_mutex_lock(&mMutex);
991
992    rc = validateCaptureRequest(request);
993    if (rc != NO_ERROR) {
994        ALOGE("%s: incoming request is not valid", __func__);
995        pthread_mutex_unlock(&mMutex);
996        return rc;
997    }
998
999    meta = request->settings;
1000
1001    // For first capture request, send capture intent, and
1002    // stream on all streams
1003    if (mFirstRequest) {
1004
1005        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1006            int32_t hal_version = CAM_HAL_V3;
1007            uint8_t captureIntent =
1008                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1009
1010            memset(mParameters, 0, sizeof(parm_buffer_t));
1011            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1012            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1013                sizeof(hal_version), &hal_version);
1014            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1015                sizeof(captureIntent), &captureIntent);
1016            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1017                mParameters);
1018        }
1019
1020        mMetadataChannel->start();
1021        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1022            it != mStreamInfo.end(); it++) {
1023            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1024            channel->start();
1025        }
1026    }
1027
1028    uint32_t frameNumber = request->frame_number;
1029    uint32_t streamTypeMask = 0;
1030
1031    if (meta.exists(ANDROID_REQUEST_ID)) {
1032        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1033        mCurrentRequestId = request_id;
1034        ALOGV("%s: Received request with id: %d",__func__, request_id);
1035    } else if (mFirstRequest || mCurrentRequestId == -1){
1036        ALOGE("%s: Unable to find request id field, \
1037                & no previous id available", __func__);
1038        return NAME_NOT_FOUND;
1039    } else {
1040        ALOGV("%s: Re-using old request id", __func__);
1041        request_id = mCurrentRequestId;
1042    }
1043
1044    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1045                                    __func__, __LINE__,
1046                                    request->num_output_buffers,
1047                                    request->input_buffer,
1048                                    frameNumber);
1049    // Acquire all request buffers first
1050    int blob_request = 0;
1051    for (size_t i = 0; i < request->num_output_buffers; i++) {
1052        const camera3_stream_buffer_t& output = request->output_buffers[i];
1053        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1054        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1055
1056        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1057        //Call function to store local copy of jpeg data for encode params.
1058            blob_request = 1;
1059            rc = getJpegSettings(request->settings);
1060            if (rc < 0) {
1061                ALOGE("%s: failed to get jpeg parameters", __func__);
1062                pthread_mutex_unlock(&mMutex);
1063                return rc;
1064            }
1065        }
1066
1067        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1068        if (rc != OK) {
1069            ALOGE("%s: fence wait failed %d", __func__, rc);
1070            pthread_mutex_unlock(&mMutex);
1071            return rc;
1072        }
1073        streamTypeMask |= channel->getStreamTypeMask();
1074    }
1075
1076    rc = setFrameParameters(request, streamTypeMask);
1077    if (rc < 0) {
1078        ALOGE("%s: fail to set frame parameters", __func__);
1079        pthread_mutex_unlock(&mMutex);
1080        return rc;
1081    }
1082
1083    /* Update pending request list and pending buffers map */
1084    PendingRequestInfo pendingRequest;
1085    pendingRequest.frame_number = frameNumber;
1086    pendingRequest.num_buffers = request->num_output_buffers;
1087    pendingRequest.request_id = request_id;
1088    pendingRequest.blob_request = blob_request;
1089    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1090
1091    for (size_t i = 0; i < request->num_output_buffers; i++) {
1092        RequestedBufferInfo requestedBuf;
1093        requestedBuf.stream = request->output_buffers[i].stream;
1094        requestedBuf.buffer = NULL;
1095        pendingRequest.buffers.push_back(requestedBuf);
1096
1097        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1098    }
1099    mPendingRequestsList.push_back(pendingRequest);
1100
1101    // Notify metadata channel we receive a request
1102    mMetadataChannel->request(NULL, frameNumber);
1103
1104    // Call request on other streams
1105    for (size_t i = 0; i < request->num_output_buffers; i++) {
1106        const camera3_stream_buffer_t& output = request->output_buffers[i];
1107        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1108        mm_camera_buf_def_t *pInputBuffer = NULL;
1109
1110        if (channel == NULL) {
1111            ALOGE("%s: invalid channel pointer for stream", __func__);
1112            continue;
1113        }
1114
1115        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1116            QCamera3RegularChannel* inputChannel = NULL;
1117            if(request->input_buffer != NULL){
1118                //Try to get the internal format
1119                inputChannel = (QCamera3RegularChannel*)
1120                    request->input_buffer->stream->priv;
1121                if(inputChannel == NULL ){
1122                    ALOGE("%s: failed to get input channel handle", __func__);
1123                } else {
1124                    pInputBuffer =
1125                        inputChannel->getInternalFormatBuffer(
1126                                request->input_buffer->buffer);
1127                    ALOGD("%s: Input buffer dump",__func__);
1128                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1129                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1130                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1131                    ALOGD("Handle:%p", request->input_buffer->buffer);
1132                    //TODO: need to get corresponding metadata and send it to pproc
1133                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1134                         m != mStoredMetadataList.end(); m++) {
1135                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1136                            reproc_meta.meta_buf = m->meta_buf;
1137                            queueMetadata = 1;
1138                            break;
1139                        }
1140                    }
1141                }
1142            }
1143            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1144                            pInputBuffer,(QCamera3Channel*)inputChannel);
1145            if (queueMetadata) {
1146                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1147            }
1148        } else {
1149            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1150                __LINE__, output.buffer, frameNumber);
1151            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1152                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1153                     m != mStoredMetadataList.end(); m++) {
1154                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1155                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1156                            mMetadataChannel->bufDone(m->meta_buf);
1157                            free(m->meta_buf);
1158                            m = mStoredMetadataList.erase(m);
1159                            break;
1160                        }
1161                   }
1162                }
1163            }
1164            rc = channel->request(output.buffer, frameNumber);
1165        }
1166        if (rc < 0)
1167            ALOGE("%s: request failed", __func__);
1168    }
1169
1170    mFirstRequest = false;
1171    // Added a timed condition wait
1172    struct timespec ts;
1173    uint8_t isValidTimeout = 1;
1174    rc = clock_gettime(CLOCK_REALTIME, &ts);
1175    if (rc < 0) {
1176        isValidTimeout = 0;
1177        ALOGE("%s: Error reading the real time clock!!", __func__);
1178    }
1179    else {
1180        // Make timeout as 5 sec for request to be honored
1181        ts.tv_sec += 5;
1182    }
1183    //Block on conditional variable
1184    mPendingRequest = 1;
1185    while (mPendingRequest == 1) {
1186        if (!isValidTimeout) {
1187            ALOGV("%s: Blocking on conditional wait", __func__);
1188            pthread_cond_wait(&mRequestCond, &mMutex);
1189        }
1190        else {
1191            ALOGV("%s: Blocking on timed conditional wait", __func__);
1192            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1193            if (rc == ETIMEDOUT) {
1194                rc = -ENODEV;
1195                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1196                break;
1197            }
1198        }
1199        ALOGV("%s: Unblocked", __func__);
1200    }
1201
1202    pthread_mutex_unlock(&mMutex);
1203    return rc;
1204}
1205
1206/*===========================================================================
1207 * FUNCTION   : getMetadataVendorTagOps
1208 *
1209 * DESCRIPTION:
1210 *
1211 * PARAMETERS :
1212 *
1213 *
1214 * RETURN     :
1215 *==========================================================================*/
1216void QCamera3HardwareInterface::getMetadataVendorTagOps(
1217                    vendor_tag_query_ops_t* /*ops*/)
1218{
1219    /* Enable locks when we eventually add Vendor Tags */
1220    /*
1221    pthread_mutex_lock(&mMutex);
1222
1223    pthread_mutex_unlock(&mMutex);
1224    */
1225    return;
1226}
1227
1228/*===========================================================================
1229 * FUNCTION   : dump
1230 *
1231 * DESCRIPTION:
1232 *
1233 * PARAMETERS :
1234 *
1235 *
1236 * RETURN     :
1237 *==========================================================================*/
1238void QCamera3HardwareInterface::dump(int /*fd*/)
1239{
1240    /*Enable lock when we implement this function*/
1241    /*
1242    pthread_mutex_lock(&mMutex);
1243
1244    pthread_mutex_unlock(&mMutex);
1245    */
1246    return;
1247}
1248
1249/*===========================================================================
1250 * FUNCTION   : flush
1251 *
1252 * DESCRIPTION:
1253 *
1254 * PARAMETERS :
1255 *
1256 *
1257 * RETURN     :
1258 *==========================================================================*/
1259int QCamera3HardwareInterface::flush()
1260{
1261    /*Enable lock when we implement this function*/
1262    /*
1263    pthread_mutex_lock(&mMutex);
1264
1265    pthread_mutex_unlock(&mMutex);
1266    */
1267    return 0;
1268}
1269
1270/*===========================================================================
1271 * FUNCTION   : captureResultCb
1272 *
1273 * DESCRIPTION: Callback handler for all capture result
1274 *              (streams, as well as metadata)
1275 *
1276 * PARAMETERS :
1277 *   @metadata : metadata information
1278 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1279 *               NULL if metadata.
1280 *
1281 * RETURN     : NONE
1282 *==========================================================================*/
1283void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1284                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1285{
1286    pthread_mutex_lock(&mMutex);
1287
1288    if (metadata_buf) {
1289        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1290        int32_t frame_number_valid = *(int32_t *)
1291            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1292        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1293            CAM_INTF_META_PENDING_REQUESTS, metadata);
1294        uint32_t frame_number = *(uint32_t *)
1295            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1296        const struct timeval *tv = (const struct timeval *)
1297            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1298        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1299            tv->tv_usec * NSEC_PER_USEC;
1300
1301        if (!frame_number_valid) {
1302            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1303            mMetadataChannel->bufDone(metadata_buf);
1304            goto done_metadata;
1305        }
1306        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1307                frame_number, capture_time);
1308
1309        // Go through the pending requests info and send shutter/results to frameworks
1310        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1311                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1312            camera3_capture_result_t result;
1313            camera3_notify_msg_t notify_msg;
1314            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1315
1316            // Flush out all entries with less or equal frame numbers.
1317
1318            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1319            //Right now it's the same as metadata timestamp
1320
1321            //TODO: When there is metadata drop, how do we derive the timestamp of
1322            //dropped frames? For now, we fake the dropped timestamp by substracting
1323            //from the reported timestamp
1324            nsecs_t current_capture_time = capture_time -
1325                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1326
1327            // Send shutter notify to frameworks
1328            notify_msg.type = CAMERA3_MSG_SHUTTER;
1329            notify_msg.message.shutter.frame_number = i->frame_number;
1330            notify_msg.message.shutter.timestamp = current_capture_time;
1331            mCallbackOps->notify(mCallbackOps, &notify_msg);
1332            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1333                    i->frame_number, capture_time);
1334
1335            // Send empty metadata with already filled buffers for dropped metadata
1336            // and send valid metadata with already filled buffers for current metadata
1337            if (i->frame_number < frame_number) {
1338                CameraMetadata dummyMetadata;
1339                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1340                        &current_capture_time, 1);
1341                dummyMetadata.update(ANDROID_REQUEST_ID,
1342                        &(i->request_id), 1);
1343                result.result = dummyMetadata.release();
1344            } else {
1345                result.result = translateCbMetadataToResultMetadata(metadata,
1346                        current_capture_time, i->request_id);
1347                if (mIsZslMode) {
1348                   int found_metadata = 0;
1349                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1350                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1351                        j != i->buffers.end(); j++) {
1352                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1353                         //check if corresp. zsl already exists in the stored metadata list
1354                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1355                               m != mStoredMetadataList.begin(); m++) {
1356                            if (m->frame_number == frame_number) {
1357                               m->meta_buf = metadata_buf;
1358                               found_metadata = 1;
1359                               break;
1360                            }
1361                         }
1362                         if (!found_metadata) {
1363                            MetadataBufferInfo store_meta_info;
1364                            store_meta_info.meta_buf = metadata_buf;
1365                            store_meta_info.frame_number = frame_number;
1366                            mStoredMetadataList.push_back(store_meta_info);
1367                            found_metadata = 1;
1368                         }
1369                      }
1370                   }
1371                   if (!found_metadata) {
1372                       if (!i->input_buffer_present && i->blob_request) {
1373                          //livesnapshot or fallback non-zsl snapshot case
1374                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1375                                j != i->buffers.end(); j++){
1376                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1377                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1378                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1379                                 break;
1380                              }
1381                         }
1382                       } else {
1383                            //return the metadata immediately
1384                            mMetadataChannel->bufDone(metadata_buf);
1385                            free(metadata_buf);
1386                       }
1387                   }
1388               } else if (!mIsZslMode && i->blob_request) {
1389                   //If it is a blob request then send the metadata to the picture channel
1390                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1391               } else {
1392                   // Return metadata buffer
1393                   mMetadataChannel->bufDone(metadata_buf);
1394                   free(metadata_buf);
1395               }
1396
1397            }
1398            if (!result.result) {
1399                ALOGE("%s: metadata is NULL", __func__);
1400            }
1401            result.frame_number = i->frame_number;
1402            result.num_output_buffers = 0;
1403            result.output_buffers = NULL;
1404            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1405                    j != i->buffers.end(); j++) {
1406                if (j->buffer) {
1407                    result.num_output_buffers++;
1408                }
1409            }
1410
1411            if (result.num_output_buffers > 0) {
1412                camera3_stream_buffer_t *result_buffers =
1413                    new camera3_stream_buffer_t[result.num_output_buffers];
1414                if (!result_buffers) {
1415                    ALOGE("%s: Fatal error: out of memory", __func__);
1416                }
1417                size_t result_buffers_idx = 0;
1418                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1419                        j != i->buffers.end(); j++) {
1420                    if (j->buffer) {
1421                        result_buffers[result_buffers_idx++] = *(j->buffer);
1422                        free(j->buffer);
1423                        j->buffer = NULL;
1424                        mPendingBuffersMap.editValueFor(j->stream)--;
1425                    }
1426                }
1427                result.output_buffers = result_buffers;
1428
1429                mCallbackOps->process_capture_result(mCallbackOps, &result);
1430                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1431                        __func__, result.frame_number, current_capture_time);
1432                free_camera_metadata((camera_metadata_t *)result.result);
1433                delete[] result_buffers;
1434            } else {
1435                mCallbackOps->process_capture_result(mCallbackOps, &result);
1436                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1437                        __func__, result.frame_number, current_capture_time);
1438                free_camera_metadata((camera_metadata_t *)result.result);
1439            }
1440            // erase the element from the list
1441            i = mPendingRequestsList.erase(i);
1442        }
1443
1444
1445done_metadata:
1446        bool max_buffers_dequeued = false;
1447        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1448            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1449            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1450            if (queued_buffers == stream->max_buffers) {
1451                max_buffers_dequeued = true;
1452                break;
1453            }
1454        }
1455        if (!max_buffers_dequeued && !pending_requests) {
1456            // Unblock process_capture_request
1457            mPendingRequest = 0;
1458            pthread_cond_signal(&mRequestCond);
1459        }
1460    } else {
1461        // If the frame number doesn't exist in the pending request list,
1462        // directly send the buffer to the frameworks, and update pending buffers map
1463        // Otherwise, book-keep the buffer.
1464        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1465        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1466            i++;
1467        }
1468        if (i == mPendingRequestsList.end()) {
1469            // Verify all pending requests frame_numbers are greater
1470            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1471                    j != mPendingRequestsList.end(); j++) {
1472                if (j->frame_number < frame_number) {
1473                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1474                            __func__, j->frame_number, frame_number);
1475                }
1476            }
1477            camera3_capture_result_t result;
1478            result.result = NULL;
1479            result.frame_number = frame_number;
1480            result.num_output_buffers = 1;
1481            result.output_buffers = buffer;
1482            ALOGV("%s: result frame_number = %d, buffer = %p",
1483                    __func__, frame_number, buffer);
1484            mPendingBuffersMap.editValueFor(buffer->stream)--;
1485            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1486                int found = 0;
1487                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1488                      k != mStoredMetadataList.end(); k++) {
1489                    if (k->frame_number == frame_number) {
1490                        k->zsl_buf_hdl = buffer->buffer;
1491                        found = 1;
1492                        break;
1493                    }
1494                }
1495                if (!found) {
1496                   MetadataBufferInfo meta_info;
1497                   meta_info.frame_number = frame_number;
1498                   meta_info.zsl_buf_hdl = buffer->buffer;
1499                   mStoredMetadataList.push_back(meta_info);
1500                }
1501            }
1502            mCallbackOps->process_capture_result(mCallbackOps, &result);
1503        } else {
1504            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1505                    j != i->buffers.end(); j++) {
1506                if (j->stream == buffer->stream) {
1507                    if (j->buffer != NULL) {
1508                        ALOGE("%s: Error: buffer is already set", __func__);
1509                    } else {
1510                        j->buffer = (camera3_stream_buffer_t *)malloc(
1511                                sizeof(camera3_stream_buffer_t));
1512                        *(j->buffer) = *buffer;
1513                        ALOGV("%s: cache buffer %p at result frame_number %d",
1514                                __func__, buffer, frame_number);
1515                    }
1516                }
1517            }
1518        }
1519    }
1520    pthread_mutex_unlock(&mMutex);
1521    return;
1522}
1523
1524/*===========================================================================
1525 * FUNCTION   : translateCbMetadataToResultMetadata
1526 *
 * DESCRIPTION: Translate the metadata buffer received from the camera
 *              backend into a framework camera_metadata_t result, attaching
 *              the given timestamp and request id.
1528 *
1529 * PARAMETERS :
1530 *   @metadata : metadata information from callback
1531 *
1532 * RETURN     : camera_metadata_t*
1533 *              metadata in a format specified by fwk
1534 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    // Translates the vendor metadata buffer received from the camera backend
    // into a framework camera_metadata_t result. Each CAM_INTF_* entry is
    // read via POINTER_OF and published under the corresponding ANDROID_* tag.
    // The returned buffer is released from camMetadata, so ownership passes
    // to the caller.
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Timestamp and request id are supplied by the caller, not the backend.
    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    // NOTE(review): these are variable-length arrays (a GCC extension); when
    // numFaces is 0 the declarations are zero-length — the loop and the
    // updates below are skipped in that case, so the arrays are never read.
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        // weight -1 => convertToRegions emits only the 4 rectangle values.
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;   // 4 int32 values per face rectangle
        k+= 6;   // 6 int32 values per face landmark set
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    // Regions are published as [x_min, y_min, x_max, y_max, weight].
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);

    uint8_t *ae_state =
            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);

    // HAL focus-mode enum must be mapped back to the framework enum.
    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
            sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    // White-balance mode also needs HAL -> framework enum translation.
    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
        sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
        *whiteBalance);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    // Crop region is published as [left, top, width, height].
    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    // Exposure time and ISO are also cached in mMetadataResponse,
    // presumably for later use (e.g. JPEG EXIF) — confirm against callers.
    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    mMetadataResponse.exposure_time = *sensorExpTime;
    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

    int64_t  *sensorFameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);

    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
    mMetadataResponse.iso_speed = *sensorSensitivity;
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
        *faceDetectMode);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    // NOTE(review): publishes the full maximum-size map regardless of the
    // actual map dimensions — verify this matches the framework expectation.
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    // Lens shading map: 4 floats (one per Bayer channel) per grid cell.
    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                       (float*)lensShadingMap->lens_shading,
                       4*map_width*map_height);

    //Populate CAM_INTF_META_TONEMAP_CURVES
    /* ch0 = G, ch 1 = B, ch 2 = R*/
    // Each tonemap point is an (in, out) pair, hence count * 2 floats.
    cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
        POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
    camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                       (float*)tonemap->curves[0].tonemap_points,
                       tonemap->tonemap_points_cnt * 2);

    camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                       (float*)tonemap->curves[1].tonemap_points,
                       tonemap->tonemap_points_cnt * 2);

    camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                       (float*)tonemap->curves[2].tonemap_points,
                       tonemap->tonemap_points_cnt * 2);

    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);

    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                       predColorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);

    uint8_t *blackLevelLock = (uint8_t*)
        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);

    uint8_t *sceneFlicker = (uint8_t*)
        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);


    // release() hands the underlying buffer to the caller; camMetadata no
    // longer owns it after this point.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
1803
1804/*===========================================================================
1805 * FUNCTION   : convertToRegions
1806 *
1807 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1808 *
1809 * PARAMETERS :
1810 *   @rect   : cam_rect_t struct to convert
1811 *   @region : int32_t destination array
1812 *   @weight : if we are converting from cam_area_t, weight is valid
1813 *             else weight = -1
1814 *
1815 *==========================================================================*/
1816void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1817    region[0] = rect.left;
1818    region[1] = rect.top;
1819    region[2] = rect.left + rect.width;
1820    region[3] = rect.top + rect.height;
1821    if (weight > -1) {
1822        region[4] = weight;
1823    }
1824}
1825
1826/*===========================================================================
1827 * FUNCTION   : convertFromRegions
1828 *
1829 * DESCRIPTION: helper method to convert from array to cam_rect_t
1830 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination to fill in
 *   @settings : framework metadata to read the region entry from
 *   @tag      : metadata tag identifying the
 *               [x_min, y_min, x_max, y_max, weight] entry
1836 *
1837 *==========================================================================*/
1838void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1839                                                   const camera_metadata_t *settings,
1840                                                   uint32_t tag){
1841    CameraMetadata frame_settings;
1842    frame_settings = settings;
1843    int32_t x_min = frame_settings.find(tag).data.i32[0];
1844    int32_t y_min = frame_settings.find(tag).data.i32[1];
1845    int32_t x_max = frame_settings.find(tag).data.i32[2];
1846    int32_t y_max = frame_settings.find(tag).data.i32[3];
1847    roi->weight = frame_settings.find(tag).data.i32[4];
1848    roi->rect.left = x_min;
1849    roi->rect.top = y_min;
1850    roi->rect.width = x_max - x_min;
1851    roi->rect.height = y_max - y_min;
1852}
1853
1854/*===========================================================================
1855 * FUNCTION   : resetIfNeededROI
1856 *
1857 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1858 *              crop region
1859 *
1860 * PARAMETERS :
1861 *   @roi       : cam_area_t struct to resize
1862 *   @scalerCropRegion : cam_crop_region_t region to compare against
1863 *
1864 *
1865 *==========================================================================*/
1866bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1867                                                 const cam_crop_region_t* scalerCropRegion)
1868{
1869    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1870    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1871    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1872    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1873    if ((roi_x_max < scalerCropRegion->left) ||
1874        (roi_y_max < scalerCropRegion->top)  ||
1875        (roi->rect.left > crop_x_max) ||
1876        (roi->rect.top > crop_y_max)){
1877        return false;
1878    }
1879    if (roi->rect.left < scalerCropRegion->left) {
1880        roi->rect.left = scalerCropRegion->left;
1881    }
1882    if (roi->rect.top < scalerCropRegion->top) {
1883        roi->rect.top = scalerCropRegion->top;
1884    }
1885    if (roi_x_max > crop_x_max) {
1886        roi_x_max = crop_x_max;
1887    }
1888    if (roi_y_max > crop_y_max) {
1889        roi_y_max = crop_y_max;
1890    }
1891    roi->rect.width = roi_x_max - roi->rect.left;
1892    roi->rect.height = roi_y_max - roi->rect.top;
1893    return true;
1894}
1895
1896/*===========================================================================
1897 * FUNCTION   : convertLandmarks
1898 *
1899 * DESCRIPTION: helper method to extract the landmarks from face detection info
1900 *
1901 * PARAMETERS :
 *   @face   : cam_face_detection_info_t containing eye and mouth centers
 *   @landmarks : int32_t destination array (6 elements)
1904 *
1905 *
1906 *==========================================================================*/
1907void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1908{
1909    landmarks[0] = face.left_eye_center.x;
1910    landmarks[1] = face.left_eye_center.y;
1911    landmarks[2] = face.right_eye_center.y;
1912    landmarks[3] = face.right_eye_center.y;
1913    landmarks[4] = face.mouth_center.x;
1914    landmarks[5] = face.mouth_center.y;
1915}
1916
1917#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1918/*===========================================================================
1919 * FUNCTION   : initCapabilities
1920 *
1921 * DESCRIPTION: initialize camera capabilities in static data struct
1922 *
1923 * PARAMETERS :
1924 *   @cameraId  : camera Id
1925 *
1926 * RETURN     : int32_t type of status
1927 *              NO_ERROR  -- success
1928 *              none-zero failure code
1929 *==========================================================================*/
1930int QCamera3HardwareInterface::initCapabilities(int cameraId)
1931{
1932    int rc = 0;
1933    mm_camera_vtbl_t *cameraHandle = NULL;
1934    QCamera3HeapMemory *capabilityHeap = NULL;
1935
1936    cameraHandle = camera_open(cameraId);
1937    if (!cameraHandle) {
1938        ALOGE("%s: camera_open failed", __func__);
1939        rc = -1;
1940        goto open_failed;
1941    }
1942
1943    capabilityHeap = new QCamera3HeapMemory();
1944    if (capabilityHeap == NULL) {
1945        ALOGE("%s: creation of capabilityHeap failed", __func__);
1946        goto heap_creation_failed;
1947    }
1948    /* Allocate memory for capability buffer */
1949    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1950    if(rc != OK) {
1951        ALOGE("%s: No memory for cappability", __func__);
1952        goto allocate_failed;
1953    }
1954
1955    /* Map memory for capability buffer */
1956    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1957    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1958                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1959                                capabilityHeap->getFd(0),
1960                                sizeof(cam_capability_t));
1961    if(rc < 0) {
1962        ALOGE("%s: failed to map capability buffer", __func__);
1963        goto map_failed;
1964    }
1965
1966    /* Query Capability */
1967    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1968    if(rc < 0) {
1969        ALOGE("%s: failed to query capability",__func__);
1970        goto query_failed;
1971    }
1972    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1973    if (!gCamCapability[cameraId]) {
1974        ALOGE("%s: out of memory", __func__);
1975        goto query_failed;
1976    }
1977    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1978                                        sizeof(cam_capability_t));
1979    rc = 0;
1980
1981query_failed:
1982    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1983                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1984map_failed:
1985    capabilityHeap->deallocate();
1986allocate_failed:
1987    delete capabilityHeap;
1988heap_creation_failed:
1989    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1990    cameraHandle = NULL;
1991open_failed:
1992    return rc;
1993}
1994
1995/*===========================================================================
1996 * FUNCTION   : initParameters
1997 *
1998 * DESCRIPTION: initialize camera parameters
1999 *
2000 * PARAMETERS :
2001 *
2002 * RETURN     : int32_t type of status
2003 *              NO_ERROR  -- success
2004 *              none-zero failure code
2005 *==========================================================================*/
2006int QCamera3HardwareInterface::initParameters()
2007{
2008    int rc = 0;
2009
2010    //Allocate Set Param Buffer
2011    mParamHeap = new QCamera3HeapMemory();
2012    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2013    if(rc != OK) {
2014        rc = NO_MEMORY;
2015        ALOGE("Failed to allocate SETPARM Heap memory");
2016        delete mParamHeap;
2017        mParamHeap = NULL;
2018        return rc;
2019    }
2020
2021    //Map memory for parameters buffer
2022    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2023            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2024            mParamHeap->getFd(0),
2025            sizeof(parm_buffer_t));
2026    if(rc < 0) {
2027        ALOGE("%s:failed to map SETPARM buffer",__func__);
2028        rc = FAILED_TRANSACTION;
2029        mParamHeap->deallocate();
2030        delete mParamHeap;
2031        mParamHeap = NULL;
2032        return rc;
2033    }
2034
2035    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2036    return rc;
2037}
2038
2039/*===========================================================================
2040 * FUNCTION   : deinitParameters
2041 *
2042 * DESCRIPTION: de-initialize camera parameters
2043 *
2044 * PARAMETERS :
2045 *
2046 * RETURN     : NONE
2047 *==========================================================================*/
2048void QCamera3HardwareInterface::deinitParameters()
2049{
2050    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2051            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2052
2053    mParamHeap->deallocate();
2054    delete mParamHeap;
2055    mParamHeap = NULL;
2056
2057    mParameters = NULL;
2058}
2059
2060/*===========================================================================
2061 * FUNCTION   : calcMaxJpegSize
2062 *
2063 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2064 *
2065 * PARAMETERS :
2066 *
2067 * RETURN     : max_jpeg_size
2068 *==========================================================================*/
2069int QCamera3HardwareInterface::calcMaxJpegSize()
2070{
2071    int32_t max_jpeg_size = 0;
2072    int temp_width, temp_height;
2073    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2074        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2075        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2076        if (temp_width * temp_height > max_jpeg_size ) {
2077            max_jpeg_size = temp_width * temp_height;
2078        }
2079    }
2080    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2081    return max_jpeg_size;
2082}
2083
2084/*===========================================================================
2085 * FUNCTION   : initStaticMetadata
2086 *
2087 * DESCRIPTION: initialize the static metadata
2088 *
2089 * PARAMETERS :
2090 *   @cameraId  : camera Id
2091 *
2092 * RETURN     : int32_t type of status
2093 *              0  -- success
2094 *              non-zero failure code
2095 *==========================================================================*/
2096int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2097{
2098    int rc = 0;
2099    CameraMetadata staticInfo;
2100
2101    /* android.info: hardware level */
2102    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2103    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2104        &supportedHardwareLevel, 1);
2105
2106    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2107    /*HAL 3 only*/
2108    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2109                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2110
2111    /*hard coded for now but this should come from sensor*/
2112    float min_focus_distance;
2113    if(facingBack){
2114        min_focus_distance = 10;
2115    } else {
2116        min_focus_distance = 0;
2117    }
2118    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2119                    &min_focus_distance, 1);
2120
2121    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2122                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2123
2124    /*should be using focal lengths but sensor doesn't provide that info now*/
2125    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2126                      &gCamCapability[cameraId]->focal_length,
2127                      1);
2128
2129    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2130                      gCamCapability[cameraId]->apertures,
2131                      gCamCapability[cameraId]->apertures_count);
2132
2133    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2134                gCamCapability[cameraId]->filter_densities,
2135                gCamCapability[cameraId]->filter_densities_count);
2136
2137
2138    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2139                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2140                      gCamCapability[cameraId]->optical_stab_modes_count);
2141
2142    staticInfo.update(ANDROID_LENS_POSITION,
2143                      gCamCapability[cameraId]->lens_position,
2144                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2145
2146    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2147                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2148    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2149                      lens_shading_map_size,
2150                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2151
2152    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2153                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2154    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2155            geo_correction_map_size,
2156            sizeof(geo_correction_map_size)/sizeof(int32_t));
2157
2158    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2159                       gCamCapability[cameraId]->geo_correction_map,
2160                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2161
2162    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2163            gCamCapability[cameraId]->sensor_physical_size, 2);
2164
2165    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2166            gCamCapability[cameraId]->exposure_time_range, 2);
2167
2168    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2169            &gCamCapability[cameraId]->max_frame_duration, 1);
2170
2171
2172    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2173                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2174
2175    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2176                                               gCamCapability[cameraId]->pixel_array_size.height};
2177    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2178                      pixel_array_size, 2);
2179
2180    int32_t active_array_size[] = {0, 0,
2181                                                gCamCapability[cameraId]->active_array_size.width,
2182                                                gCamCapability[cameraId]->active_array_size.height};
2183    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2184                      active_array_size, 4);
2185
2186    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2187            &gCamCapability[cameraId]->white_level, 1);
2188
2189    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2190            gCamCapability[cameraId]->black_level_pattern, 4);
2191
2192    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2193                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2194
2195    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2196                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2197
2198    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2199                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2200
2201    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2202                      &gCamCapability[cameraId]->histogram_size, 1);
2203
2204    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2205            &gCamCapability[cameraId]->max_histogram_count, 1);
2206
2207    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2208                                                gCamCapability[cameraId]->sharpness_map_size.height};
2209
2210    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2211            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2212
2213    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2214            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2215
2216
2217    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2218                      &gCamCapability[cameraId]->raw_min_duration,
2219                       1);
2220
2221    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2222                                                HAL_PIXEL_FORMAT_BLOB};
2223    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2224    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2225                      scalar_formats,
2226                      scalar_formats_count);
2227
2228    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2229    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2230              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2231              available_processed_sizes);
2232    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2233                available_processed_sizes,
2234                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2235
2236    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2237                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2238                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2239
2240    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2241    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2242                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2243                 available_fps_ranges);
2244    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2245            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2246
2247    camera_metadata_rational exposureCompensationStep = {
2248            gCamCapability[cameraId]->exp_compensation_step.numerator,
2249            gCamCapability[cameraId]->exp_compensation_step.denominator};
2250    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2251                      &exposureCompensationStep, 1);
2252
2253    /*TO DO*/
2254    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2255    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2256                      availableVstabModes, sizeof(availableVstabModes));
2257
2258    /*HAL 1 and HAL 3 common*/
2259    float maxZoom = 4;
2260    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2261            &maxZoom, 1);
2262
2263    int32_t max3aRegions = 1;
2264    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2265            &max3aRegions, 1);
2266
2267    uint8_t availableFaceDetectModes[] = {
2268            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2269            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2270    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2271                      availableFaceDetectModes,
2272                      sizeof(availableFaceDetectModes));
2273
2274    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2275                                       gCamCapability[cameraId]->raw_dim.height};
2276    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2277                      raw_size,
2278                      sizeof(raw_size)/sizeof(uint32_t));
2279
2280    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2281                                                        gCamCapability[cameraId]->exposure_compensation_max};
2282    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2283            exposureCompensationRange,
2284            sizeof(exposureCompensationRange)/sizeof(int32_t));
2285
2286    uint8_t lensFacing = (facingBack) ?
2287            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2288    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2289
2290    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2291                available_processed_sizes,
2292                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2293
2294    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2295                      available_thumbnail_sizes,
2296                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2297
2298    int32_t max_jpeg_size = 0;
2299    int temp_width, temp_height;
2300    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2301        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2302        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2303        if (temp_width * temp_height > max_jpeg_size ) {
2304            max_jpeg_size = temp_width * temp_height;
2305        }
2306    }
2307    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2308    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2309                      &max_jpeg_size, 1);
2310
2311    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2312    int32_t size = 0;
2313    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2314        int val = lookupFwkName(EFFECT_MODES_MAP,
2315                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2316                                   gCamCapability[cameraId]->supported_effects[i]);
2317        if (val != NAME_NOT_FOUND) {
2318            avail_effects[size] = (uint8_t)val;
2319            size++;
2320        }
2321    }
2322    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2323                      avail_effects,
2324                      size);
2325
2326    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2327    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2328    int32_t supported_scene_modes_cnt = 0;
2329    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2330        int val = lookupFwkName(SCENE_MODES_MAP,
2331                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2332                                gCamCapability[cameraId]->supported_scene_modes[i]);
2333        if (val != NAME_NOT_FOUND) {
2334            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2335            supported_indexes[supported_scene_modes_cnt] = i;
2336            supported_scene_modes_cnt++;
2337        }
2338    }
2339
2340    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2341                      avail_scene_modes,
2342                      supported_scene_modes_cnt);
2343
2344    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2345    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2346                      supported_scene_modes_cnt,
2347                      scene_mode_overrides,
2348                      supported_indexes,
2349                      cameraId);
2350    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2351                      scene_mode_overrides,
2352                      supported_scene_modes_cnt*3);
2353
2354    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2355    size = 0;
2356    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2357        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2358                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2359                                 gCamCapability[cameraId]->supported_antibandings[i]);
2360        if (val != NAME_NOT_FOUND) {
2361            avail_antibanding_modes[size] = (uint8_t)val;
2362            size++;
2363        }
2364
2365    }
2366    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2367                      avail_antibanding_modes,
2368                      size);
2369
2370    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2371    size = 0;
2372    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2373        int val = lookupFwkName(FOCUS_MODES_MAP,
2374                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2375                                gCamCapability[cameraId]->supported_focus_modes[i]);
2376        if (val != NAME_NOT_FOUND) {
2377            avail_af_modes[size] = (uint8_t)val;
2378            size++;
2379        }
2380    }
2381    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2382                      avail_af_modes,
2383                      size);
2384
2385    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2386    size = 0;
2387    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2388        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2389                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2390                                    gCamCapability[cameraId]->supported_white_balances[i]);
2391        if (val != NAME_NOT_FOUND) {
2392            avail_awb_modes[size] = (uint8_t)val;
2393            size++;
2394        }
2395    }
2396    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2397                      avail_awb_modes,
2398                      size);
2399
2400    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2401    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2402      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2403
2404    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2405            available_flash_levels,
2406            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2407
2408
2409    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2410    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2411            &flashAvailable, 1);
2412
2413    uint8_t avail_ae_modes[5];
2414    size = 0;
2415    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2416        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2417        size++;
2418    }
2419    if (flashAvailable) {
2420        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2421        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2422        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2423    }
2424    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2425                      avail_ae_modes,
2426                      size);
2427
2428    int32_t sensitivity_range[2];
2429    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2430    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2431    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2432                      sensitivity_range,
2433                      sizeof(sensitivity_range) / sizeof(int32_t));
2434
2435    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2436                      &gCamCapability[cameraId]->max_analog_sensitivity,
2437                      1);
2438
2439    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2440                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2441                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2442
2443    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2444    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2445                      &sensor_orientation,
2446                      1);
2447
2448    int32_t max_output_streams[3] = {1, 3, 1};
2449    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2450                      max_output_streams,
2451                      3);
2452
2453    gStaticMetadata[cameraId] = staticInfo.release();
2454    return rc;
2455}
2456
2457/*===========================================================================
2458 * FUNCTION   : makeTable
2459 *
2460 * DESCRIPTION: make a table of sizes
2461 *
2462 * PARAMETERS :
2463 *
2464 *
2465 *==========================================================================*/
2466void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2467                                          int32_t* sizeTable)
2468{
2469    int j = 0;
2470    for (int i = 0; i < size; i++) {
2471        sizeTable[j] = dimTable[i].width;
2472        sizeTable[j+1] = dimTable[i].height;
2473        j+=2;
2474    }
2475}
2476
2477/*===========================================================================
2478 * FUNCTION   : makeFPSTable
2479 *
2480 * DESCRIPTION: make a table of fps ranges
2481 *
2482 * PARAMETERS :
2483 *
2484 *==========================================================================*/
2485void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2486                                          int32_t* fpsRangesTable)
2487{
2488    int j = 0;
2489    for (int i = 0; i < size; i++) {
2490        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2491        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2492        j+=2;
2493    }
2494}
2495
2496/*===========================================================================
2497 * FUNCTION   : makeOverridesList
2498 *
2499 * DESCRIPTION: make a list of scene mode overrides
2500 *
2501 * PARAMETERS :
2502 *
2503 *
2504 *==========================================================================*/
2505void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2506                                                  uint8_t size, uint8_t* overridesList,
2507                                                  uint8_t* supported_indexes,
2508                                                  int camera_id)
2509{
2510    /*daemon will give a list of overrides for all scene modes.
2511      However we should send the fwk only the overrides for the scene modes
2512      supported by the framework*/
2513    int j = 0, index = 0, supt = 0;
2514    uint8_t focus_override;
2515    for (int i = 0; i < size; i++) {
2516        supt = 0;
2517        index = supported_indexes[i];
2518        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2519        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2520                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2521                                                    overridesTable[index].awb_mode);
2522        focus_override = (uint8_t)overridesTable[index].af_mode;
2523        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2524           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2525              supt = 1;
2526              break;
2527           }
2528        }
2529        if (supt) {
2530           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2531                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2532                                              focus_override);
2533        } else {
2534           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2535        }
2536        j+=3;
2537    }
2538}
2539
2540/*===========================================================================
 * FUNCTION   : getScalarFormat
2542 *
2543 * DESCRIPTION: convert the format to type recognized by framework
2544 *
2545 * PARAMETERS : format : the format from backend
2546 *
2547 ** RETURN    : format recognized by framework
2548 *
2549 *==========================================================================*/
2550int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2551{
2552    int32_t halPixelFormat;
2553
2554    switch (format) {
2555    case CAM_FORMAT_YUV_420_NV12:
2556        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2557        break;
2558    case CAM_FORMAT_YUV_420_NV21:
2559        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2560        break;
2561    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2562        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2563        break;
2564    case CAM_FORMAT_YUV_420_YV12:
2565        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2566        break;
2567    case CAM_FORMAT_YUV_422_NV16:
2568    case CAM_FORMAT_YUV_422_NV61:
2569    default:
2570        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2571        break;
2572    }
2573    return halPixelFormat;
2574}
2575
2576/*===========================================================================
2577 * FUNCTION   : getSensorSensitivity
2578 *
2579 * DESCRIPTION: convert iso_mode to an integer value
2580 *
2581 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2582 *
2583 ** RETURN    : sensitivity supported by sensor
2584 *
2585 *==========================================================================*/
2586int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2587{
2588    int32_t sensitivity;
2589
2590    switch (iso_mode) {
2591    case CAM_ISO_MODE_100:
2592        sensitivity = 100;
2593        break;
2594    case CAM_ISO_MODE_200:
2595        sensitivity = 200;
2596        break;
2597    case CAM_ISO_MODE_400:
2598        sensitivity = 400;
2599        break;
2600    case CAM_ISO_MODE_800:
2601        sensitivity = 800;
2602        break;
2603    case CAM_ISO_MODE_1600:
2604        sensitivity = 1600;
2605        break;
2606    default:
2607        sensitivity = -1;
2608        break;
2609    }
2610    return sensitivity;
2611}
2612
2613
2614/*===========================================================================
2615 * FUNCTION   : AddSetParmEntryToBatch
2616 *
2617 * DESCRIPTION: add set parameter entry into batch
2618 *
2619 * PARAMETERS :
2620 *   @p_table     : ptr to parameter buffer
2621 *   @paramType   : parameter type
2622 *   @paramLength : length of parameter value
2623 *   @paramValue  : ptr to parameter value
2624 *
2625 * RETURN     : int32_t type of status
2626 *              NO_ERROR  -- success
2627 *              none-zero failure code
2628 *==========================================================================*/
2629int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2630                                                          cam_intf_parm_type_t paramType,
2631                                                          uint32_t paramLength,
2632                                                          void *paramValue)
2633{
2634    int position = paramType;
2635    int current, next;
2636
2637    /*************************************************************************
2638    *                 Code to take care of linking next flags                *
2639    *************************************************************************/
2640    current = GET_FIRST_PARAM_ID(p_table);
2641    if (position == current){
2642        //DO NOTHING
2643    } else if (position < current){
2644        SET_NEXT_PARAM_ID(position, p_table, current);
2645        SET_FIRST_PARAM_ID(p_table, position);
2646    } else {
2647        /* Search for the position in the linked list where we need to slot in*/
2648        while (position > GET_NEXT_PARAM_ID(current, p_table))
2649            current = GET_NEXT_PARAM_ID(current, p_table);
2650
2651        /*If node already exists no need to alter linking*/
2652        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2653            next = GET_NEXT_PARAM_ID(current, p_table);
2654            SET_NEXT_PARAM_ID(current, p_table, position);
2655            SET_NEXT_PARAM_ID(position, p_table, next);
2656        }
2657    }
2658
2659    /*************************************************************************
2660    *                   Copy contents into entry                             *
2661    *************************************************************************/
2662
2663    if (paramLength > sizeof(parm_type_t)) {
2664        ALOGE("%s:Size of input larger than max entry size",__func__);
2665        return BAD_VALUE;
2666    }
2667    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2668    return NO_ERROR;
2669}
2670
2671/*===========================================================================
2672 * FUNCTION   : lookupFwkName
2673 *
2674 * DESCRIPTION: In case the enum is not same in fwk and backend
2675 *              make sure the parameter is correctly propogated
2676 *
2677 * PARAMETERS  :
2678 *   @arr      : map between the two enums
2679 *   @len      : len of the map
2680 *   @hal_name : name of the hal_parm to map
2681 *
2682 * RETURN     : int type of status
2683 *              fwk_name  -- success
2684 *              none-zero failure code
2685 *==========================================================================*/
2686int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2687                                             int len, int hal_name)
2688{
2689
2690    for (int i = 0; i < len; i++) {
2691        if (arr[i].hal_name == hal_name)
2692            return arr[i].fwk_name;
2693    }
2694
2695    /* Not able to find matching framework type is not necessarily
2696     * an error case. This happens when mm-camera supports more attributes
2697     * than the frameworks do */
2698    ALOGD("%s: Cannot find matching framework type", __func__);
2699    return NAME_NOT_FOUND;
2700}
2701
2702/*===========================================================================
2703 * FUNCTION   : lookupHalName
2704 *
2705 * DESCRIPTION: In case the enum is not same in fwk and backend
2706 *              make sure the parameter is correctly propogated
2707 *
2708 * PARAMETERS  :
2709 *   @arr      : map between the two enums
2710 *   @len      : len of the map
2711 *   @fwk_name : name of the hal_parm to map
2712 *
2713 * RETURN     : int32_t type of status
2714 *              hal_name  -- success
2715 *              none-zero failure code
2716 *==========================================================================*/
2717int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2718                                             int len, int fwk_name)
2719{
2720    for (int i = 0; i < len; i++) {
2721       if (arr[i].fwk_name == fwk_name)
2722           return arr[i].hal_name;
2723    }
2724    ALOGE("%s: Cannot find matching hal type", __func__);
2725    return NAME_NOT_FOUND;
2726}
2727
2728/*===========================================================================
 * FUNCTION   : getCamInfo
2730 *
2731 * DESCRIPTION: query camera capabilities
2732 *
2733 * PARAMETERS :
2734 *   @cameraId  : camera Id
2735 *   @info      : camera info struct to be filled in with camera capabilities
2736 *
2737 * RETURN     : int32_t type of status
2738 *              NO_ERROR  -- success
2739 *              none-zero failure code
2740 *==========================================================================*/
2741int QCamera3HardwareInterface::getCamInfo(int cameraId,
2742                                    struct camera_info *info)
2743{
2744    int rc = 0;
2745
2746    if (NULL == gCamCapability[cameraId]) {
2747        rc = initCapabilities(cameraId);
2748        if (rc < 0) {
2749            //pthread_mutex_unlock(&g_camlock);
2750            return rc;
2751        }
2752    }
2753
2754    if (NULL == gStaticMetadata[cameraId]) {
2755        rc = initStaticMetadata(cameraId);
2756        if (rc < 0) {
2757            return rc;
2758        }
2759    }
2760
2761    switch(gCamCapability[cameraId]->position) {
2762    case CAM_POSITION_BACK:
2763        info->facing = CAMERA_FACING_BACK;
2764        break;
2765
2766    case CAM_POSITION_FRONT:
2767        info->facing = CAMERA_FACING_FRONT;
2768        break;
2769
2770    default:
2771        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2772        rc = -1;
2773        break;
2774    }
2775
2776
2777    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2778    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2779    info->static_camera_characteristics = gStaticMetadata[cameraId];
2780
2781    return rc;
2782}
2783
2784/*===========================================================================
2785 * FUNCTION   : translateMetadata
2786 *
2787 * DESCRIPTION: translate the metadata into camera_metadata_t
2788 *
2789 * PARAMETERS : type of the request
2790 *
2791 *
2792 * RETURN     : success: camera_metadata_t*
2793 *              failure: NULL
2794 *
2795 *==========================================================================*/
2796camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2797{
2798    pthread_mutex_lock(&mMutex);
2799
2800    if (mDefaultMetadata[type] != NULL) {
2801        pthread_mutex_unlock(&mMutex);
2802        return mDefaultMetadata[type];
2803    }
2804    //first time we are handling this request
2805    //fill up the metadata structure using the wrapper class
2806    CameraMetadata settings;
2807    //translate from cam_capability_t to camera_metadata_tag_t
2808    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2809    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2810
2811    /*control*/
2812
2813    uint8_t controlIntent = 0;
2814    switch (type) {
2815      case CAMERA3_TEMPLATE_PREVIEW:
2816        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2817        break;
2818      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2819        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2820        break;
2821      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2822        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2823        break;
2824      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2825        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2826        break;
2827      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2828        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2829        break;
2830      default:
2831        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2832        break;
2833    }
2834    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2835
2836    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2837            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2838
2839    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2840    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2841
2842    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2843    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2844
2845    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2846    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2847
2848    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2849    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2850
2851    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2852    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2853
2854    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2855    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2856
2857    static uint8_t focusMode;
2858    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2859        ALOGE("%s: Setting focus mode to auto", __func__);
2860        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2861    } else {
2862        ALOGE("%s: Setting focus mode to off", __func__);
2863        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2864    }
2865    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2866
2867    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2868    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2869
2870    /*flash*/
2871    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2872    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2873
2874    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2875    settings.update(ANDROID_FLASH_FIRING_POWER,
2876            &flashFiringLevel, 1);
2877
2878    /* lens */
2879    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2880    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2881
2882    if (gCamCapability[mCameraId]->filter_densities_count) {
2883        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2884        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2885                        gCamCapability[mCameraId]->filter_densities_count);
2886    }
2887
2888    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2889    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2890
2891    /* frame duration */
2892    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2893    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2894
2895    /* sensitivity */
2896    static const int32_t default_sensitivity = 100;
2897    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2898
2899    /*edge mode*/
2900    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2901    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
2902
2903    /*noise reduction mode*/
2904    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
2905    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
2906
2907    /*color correction mode*/
2908    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
2909    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
2910
2911    /*transform matrix mode*/
2912    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
2913    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
2914
2915    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
2916    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
2917
2918    mDefaultMetadata[type] = settings.release();
2919
2920    pthread_mutex_unlock(&mMutex);
2921    return mDefaultMetadata[type];
2922}
2923
2924/*===========================================================================
2925 * FUNCTION   : setFrameParameters
2926 *
2927 * DESCRIPTION: set parameters per frame as requested in the metadata from
2928 *              framework
2929 *
2930 * PARAMETERS :
2931 *   @request   : request that needs to be serviced
2932 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2933 *
2934 * RETURN     : success: NO_ERROR
2935 *              failure:
2936 *==========================================================================*/
2937int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2938                    uint32_t streamTypeMask)
2939{
2940    /*translate from camera_metadata_t type to parm_type_t*/
2941    int rc = 0;
2942    if (request->settings == NULL && mFirstRequest) {
2943        /*settings cannot be null for the first request*/
2944        return BAD_VALUE;
2945    }
2946
2947    int32_t hal_version = CAM_HAL_V3;
2948
2949    memset(mParameters, 0, sizeof(parm_buffer_t));
2950    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2951    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2952                sizeof(hal_version), &hal_version);
2953    if (rc < 0) {
2954        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2955        return BAD_VALUE;
2956    }
2957
2958    /*we need to update the frame number in the parameters*/
2959    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2960                                sizeof(request->frame_number), &(request->frame_number));
2961    if (rc < 0) {
2962        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2963        return BAD_VALUE;
2964    }
2965
2966    /* Update stream id mask where buffers are requested */
2967    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2968                                sizeof(streamTypeMask), &streamTypeMask);
2969    if (rc < 0) {
2970        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2971        return BAD_VALUE;
2972    }
2973
2974    if(request->settings != NULL){
2975        rc = translateMetadataToParameters(request);
2976    }
2977    /*set the parameters to backend*/
2978    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2979    return rc;
2980}
2981
2982/*===========================================================================
2983 * FUNCTION   : translateMetadataToParameters
2984 *
2985 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2986 *
2987 *
2988 * PARAMETERS :
2989 *   @request  : request sent from framework
2990 *
2991 *
2992 * RETURN     : success: NO_ERROR
2993 *              failure:
2994 *==========================================================================*/
2995int QCamera3HardwareInterface::translateMetadataToParameters
2996                                  (const camera3_capture_request_t *request)
2997{
2998    int rc = 0;
2999    CameraMetadata frame_settings;
3000    frame_settings = request->settings;
3001
3002    /* Do not change the order of the following list unless you know what you are
3003     * doing.
3004     * The order is laid out in such a way that parameters in the front of the table
3005     * may be used to override the parameters later in the table. Examples are:
3006     * 1. META_MODE should precede AEC/AWB/AF MODE
3007     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3008     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3009     * 4. Any mode should precede it's corresponding settings
3010     */
3011    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3012        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3013        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3014                sizeof(metaMode), &metaMode);
3015        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3016           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3017           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3018                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3019                                             fwk_sceneMode);
3020           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3021                sizeof(sceneMode), &sceneMode);
3022        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3023           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3024           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3025                sizeof(sceneMode), &sceneMode);
3026        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3027           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3028           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3029                sizeof(sceneMode), &sceneMode);
3030        }
3031    }
3032
3033    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3034        uint8_t fwk_aeMode =
3035            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3036        uint8_t aeMode;
3037        int32_t redeye;
3038
3039        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3040            aeMode = CAM_AE_MODE_OFF;
3041        } else {
3042            aeMode = CAM_AE_MODE_ON;
3043        }
3044        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3045            redeye = 1;
3046        } else {
3047            redeye = 0;
3048        }
3049
3050        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3051                                          sizeof(AE_FLASH_MODE_MAP),
3052                                          fwk_aeMode);
3053        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3054                sizeof(aeMode), &aeMode);
3055        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3056                sizeof(flashMode), &flashMode);
3057        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3058                sizeof(redeye), &redeye);
3059    }
3060
3061    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3062        uint8_t fwk_whiteLevel =
3063            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3064        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3065                sizeof(WHITE_BALANCE_MODES_MAP),
3066                fwk_whiteLevel);
3067        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3068                sizeof(whiteLevel), &whiteLevel);
3069    }
3070
3071    float focalDistance = -1.0;
3072    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3073        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3074        rc = AddSetParmEntryToBatch(mParameters,
3075                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3076                sizeof(focalDistance), &focalDistance);
3077    }
3078
3079    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3080        uint8_t fwk_focusMode =
3081            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3082        uint8_t focusMode;
3083        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3084            focusMode = CAM_FOCUS_MODE_INFINITY;
3085        } else{
3086         focusMode = lookupHalName(FOCUS_MODES_MAP,
3087                                   sizeof(FOCUS_MODES_MAP),
3088                                   fwk_focusMode);
3089        }
3090        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3091                sizeof(focusMode), &focusMode);
3092    }
3093
3094    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3095        int32_t antibandingMode =
3096            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3097        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3098                sizeof(antibandingMode), &antibandingMode);
3099    }
3100
3101    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3102        int32_t expCompensation = frame_settings.find(
3103            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3104        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3105            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3106        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3107            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3108        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3109          sizeof(expCompensation), &expCompensation);
3110    }
3111
3112    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3113        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3114        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3115                sizeof(aeLock), &aeLock);
3116    }
3117    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3118        cam_fps_range_t fps_range;
3119        fps_range.min_fps =
3120            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3121        fps_range.max_fps =
3122            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3123        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3124                sizeof(fps_range), &fps_range);
3125    }
3126
3127    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3128        uint8_t awbLock =
3129            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3130        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3131                sizeof(awbLock), &awbLock);
3132    }
3133
3134    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3135        uint8_t fwk_effectMode =
3136            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3137        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3138                sizeof(EFFECT_MODES_MAP),
3139                fwk_effectMode);
3140        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3141                sizeof(effectMode), &effectMode);
3142    }
3143
3144    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3145        uint8_t colorCorrectMode =
3146            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3147        rc =
3148            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3149                    sizeof(colorCorrectMode), &colorCorrectMode);
3150    }
3151
3152    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3153        cam_color_correct_gains_t colorCorrectGains;
3154        for (int i = 0; i < 4; i++) {
3155            colorCorrectGains.gains[i] =
3156                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3157        }
3158        rc =
3159            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3160                    sizeof(colorCorrectGains), &colorCorrectGains);
3161    }
3162
3163    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3164        cam_color_correct_matrix_t colorCorrectTransform;
3165        cam_rational_type_t transform_elem;
3166        int num = 0;
3167        for (int i = 0; i < 3; i++) {
3168           for (int j = 0; j < 3; j++) {
3169              transform_elem.numerator =
3170                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3171              transform_elem.denominator =
3172                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3173              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3174              num++;
3175           }
3176        }
3177        rc =
3178            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3179                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3180    }
3181
3182    cam_trigger_t aecTrigger;
3183    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3184    aecTrigger.trigger_id = -1;
3185    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3186        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3187        aecTrigger.trigger =
3188            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3189        aecTrigger.trigger_id =
3190            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3191    }
3192    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3193                                sizeof(aecTrigger), &aecTrigger);
3194
3195    /*af_trigger must come with a trigger id*/
3196    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3197        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3198        cam_trigger_t af_trigger;
3199        af_trigger.trigger =
3200            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3201        af_trigger.trigger_id =
3202            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3203        rc = AddSetParmEntryToBatch(mParameters,
3204                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3205    }
3206
3207    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3208        int32_t demosaic =
3209            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3210        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3211                sizeof(demosaic), &demosaic);
3212    }
3213
3214    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3215        cam_edge_application_t edge_application;
3216        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3217        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3218            edge_application.sharpness = 0;
3219        } else {
3220            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3221                int32_t edgeStrength =
3222                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3223                edge_application.sharpness = edgeStrength;
3224            } else {
3225                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3226            }
3227        }
3228        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3229                sizeof(edge_application), &edge_application);
3230    }
3231
3232    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3233        int32_t respectFlashMode = 1;
3234        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3235            uint8_t fwk_aeMode =
3236                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3237            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3238                respectFlashMode = 0;
3239                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3240                    __func__);
3241            }
3242        }
3243        if (respectFlashMode) {
3244            uint8_t flashMode =
3245                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3246            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3247                                          sizeof(FLASH_MODES_MAP),
3248                                          flashMode);
3249            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3250            // To check: CAM_INTF_META_FLASH_MODE usage
3251            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3252                          sizeof(flashMode), &flashMode);
3253        }
3254    }
3255
3256    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3257        uint8_t flashPower =
3258            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3259        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3260                sizeof(flashPower), &flashPower);
3261    }
3262
3263    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3264        int64_t flashFiringTime =
3265            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3266        rc = AddSetParmEntryToBatch(mParameters,
3267                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3268    }
3269
3270    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3271        uint8_t geometricMode =
3272            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3273        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3274                sizeof(geometricMode), &geometricMode);
3275    }
3276
3277    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3278        uint8_t geometricStrength =
3279            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3280        rc = AddSetParmEntryToBatch(mParameters,
3281                CAM_INTF_META_GEOMETRIC_STRENGTH,
3282                sizeof(geometricStrength), &geometricStrength);
3283    }
3284
3285    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3286        uint8_t hotPixelMode =
3287            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3288        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3289                sizeof(hotPixelMode), &hotPixelMode);
3290    }
3291
3292    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3293        float lensAperture =
3294            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3295        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3296                sizeof(lensAperture), &lensAperture);
3297    }
3298
3299    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3300        float filterDensity =
3301            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3302        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3303                sizeof(filterDensity), &filterDensity);
3304    }
3305
3306    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3307        float focalLength =
3308            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3309        rc = AddSetParmEntryToBatch(mParameters,
3310                CAM_INTF_META_LENS_FOCAL_LENGTH,
3311                sizeof(focalLength), &focalLength);
3312    }
3313
3314    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3315        uint8_t optStabMode =
3316            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3317        rc = AddSetParmEntryToBatch(mParameters,
3318                CAM_INTF_META_LENS_OPT_STAB_MODE,
3319                sizeof(optStabMode), &optStabMode);
3320    }
3321
3322    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3323        uint8_t noiseRedMode =
3324            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3325        rc = AddSetParmEntryToBatch(mParameters,
3326                CAM_INTF_META_NOISE_REDUCTION_MODE,
3327                sizeof(noiseRedMode), &noiseRedMode);
3328    }
3329
3330    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3331        uint8_t noiseRedStrength =
3332            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3333        rc = AddSetParmEntryToBatch(mParameters,
3334                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3335                sizeof(noiseRedStrength), &noiseRedStrength);
3336    }
3337
3338    cam_crop_region_t scalerCropRegion;
3339    bool scalerCropSet = false;
3340    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3341        scalerCropRegion.left =
3342            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3343        scalerCropRegion.top =
3344            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3345        scalerCropRegion.width =
3346            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3347        scalerCropRegion.height =
3348            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3349        rc = AddSetParmEntryToBatch(mParameters,
3350                CAM_INTF_META_SCALER_CROP_REGION,
3351                sizeof(scalerCropRegion), &scalerCropRegion);
3352        scalerCropSet = true;
3353    }
3354
3355    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3356        int64_t sensorExpTime =
3357            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3358        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3359        rc = AddSetParmEntryToBatch(mParameters,
3360                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3361                sizeof(sensorExpTime), &sensorExpTime);
3362    }
3363
3364    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3365        int64_t sensorFrameDuration =
3366            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3367        int64_t minFrameDuration = getMinFrameDuration(request);
3368        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3369        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3370            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3371        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3372        rc = AddSetParmEntryToBatch(mParameters,
3373                CAM_INTF_META_SENSOR_FRAME_DURATION,
3374                sizeof(sensorFrameDuration), &sensorFrameDuration);
3375    }
3376
3377    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3378        int32_t sensorSensitivity =
3379            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3380        if (sensorSensitivity <
3381                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3382            sensorSensitivity =
3383                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3384        if (sensorSensitivity >
3385                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3386            sensorSensitivity =
3387                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3388        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3389        rc = AddSetParmEntryToBatch(mParameters,
3390                CAM_INTF_META_SENSOR_SENSITIVITY,
3391                sizeof(sensorSensitivity), &sensorSensitivity);
3392    }
3393
3394    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3395        int32_t shadingMode =
3396            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3397        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3398                sizeof(shadingMode), &shadingMode);
3399    }
3400
3401    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3402        uint8_t shadingStrength =
3403            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3404        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3405                sizeof(shadingStrength), &shadingStrength);
3406    }
3407
3408    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3409        uint8_t fwk_facedetectMode =
3410            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3411        uint8_t facedetectMode =
3412            lookupHalName(FACEDETECT_MODES_MAP,
3413                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3414        rc = AddSetParmEntryToBatch(mParameters,
3415                CAM_INTF_META_STATS_FACEDETECT_MODE,
3416                sizeof(facedetectMode), &facedetectMode);
3417    }
3418
3419    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3420        uint8_t histogramMode =
3421            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3422        rc = AddSetParmEntryToBatch(mParameters,
3423                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3424                sizeof(histogramMode), &histogramMode);
3425    }
3426
3427    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3428        uint8_t sharpnessMapMode =
3429            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3430        rc = AddSetParmEntryToBatch(mParameters,
3431                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3432                sizeof(sharpnessMapMode), &sharpnessMapMode);
3433    }
3434
3435    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3436        uint8_t tonemapMode =
3437            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3438        rc = AddSetParmEntryToBatch(mParameters,
3439                CAM_INTF_META_TONEMAP_MODE,
3440                sizeof(tonemapMode), &tonemapMode);
3441    }
3442    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3443    /*All tonemap channels will have the same number of points*/
3444    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3445        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3446        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3447        cam_rgb_tonemap_curves tonemapCurves;
3448        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3449
3450        /* ch0 = G*/
3451        int point = 0;
3452        cam_tonemap_curve_t tonemapCurveGreen;
3453        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3454            for (int j = 0; j < 2; j++) {
3455               tonemapCurveGreen.tonemap_points[i][j] =
3456                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3457               point++;
3458            }
3459        }
3460        tonemapCurves.curves[0] = tonemapCurveGreen;
3461
3462        /* ch 1 = B */
3463        point = 0;
3464        cam_tonemap_curve_t tonemapCurveBlue;
3465        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3466            for (int j = 0; j < 2; j++) {
3467               tonemapCurveBlue.tonemap_points[i][j] =
3468                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3469               point++;
3470            }
3471        }
3472        tonemapCurves.curves[1] = tonemapCurveBlue;
3473
3474        /* ch 2 = R */
3475        point = 0;
3476        cam_tonemap_curve_t tonemapCurveRed;
3477        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3478            for (int j = 0; j < 2; j++) {
3479               tonemapCurveRed.tonemap_points[i][j] =
3480                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3481               point++;
3482            }
3483        }
3484        tonemapCurves.curves[2] = tonemapCurveRed;
3485
3486        rc = AddSetParmEntryToBatch(mParameters,
3487                CAM_INTF_META_TONEMAP_CURVES,
3488                sizeof(tonemapCurves), &tonemapCurves);
3489    }
3490
3491    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3492        uint8_t captureIntent =
3493            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3494        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3495                sizeof(captureIntent), &captureIntent);
3496    }
3497
3498    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3499        uint8_t blackLevelLock =
3500            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3501        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3502                sizeof(blackLevelLock), &blackLevelLock);
3503    }
3504
3505    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3506        uint8_t lensShadingMapMode =
3507            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3508        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3509                sizeof(lensShadingMapMode), &lensShadingMapMode);
3510    }
3511
3512    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3513        cam_area_t roi;
3514        bool reset = true;
3515        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3516        if (scalerCropSet) {
3517            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3518        }
3519        if (reset) {
3520            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3521                    sizeof(roi), &roi);
3522        }
3523    }
3524
3525    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3526        cam_area_t roi;
3527        bool reset = true;
3528        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3529        if (scalerCropSet) {
3530            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3531        }
3532        if (reset) {
3533            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3534                    sizeof(roi), &roi);
3535        }
3536    }
3537
3538    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3539        cam_area_t roi;
3540        bool reset = true;
3541        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3542        if (scalerCropSet) {
3543            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3544        }
3545        if (reset) {
3546            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3547                    sizeof(roi), &roi);
3548        }
3549    }
3550    return rc;
3551}
3552
3553/*===========================================================================
3554 * FUNCTION   : getJpegSettings
3555 *
3556 * DESCRIPTION: save the jpeg settings in the HAL
3557 *
3558 *
3559 * PARAMETERS :
3560 *   @settings  : frame settings information from framework
3561 *
3562 *
3563 * RETURN     : success: NO_ERROR
3564 *              failure:
3565 *==========================================================================*/
3566int QCamera3HardwareInterface::getJpegSettings
3567                                  (const camera_metadata_t *settings)
3568{
3569    if (mJpegSettings) {
3570        if (mJpegSettings->gps_timestamp) {
3571            free(mJpegSettings->gps_timestamp);
3572            mJpegSettings->gps_timestamp = NULL;
3573        }
3574        if (mJpegSettings->gps_coordinates) {
3575            for (int i = 0; i < 3; i++) {
3576                free(mJpegSettings->gps_coordinates[i]);
3577                mJpegSettings->gps_coordinates[i] = NULL;
3578            }
3579        }
3580        free(mJpegSettings);
3581        mJpegSettings = NULL;
3582    }
3583    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3584    CameraMetadata jpeg_settings;
3585    jpeg_settings = settings;
3586
3587    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3588        mJpegSettings->jpeg_orientation =
3589            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3590    } else {
3591        mJpegSettings->jpeg_orientation = 0;
3592    }
3593    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3594        mJpegSettings->jpeg_quality =
3595            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3596    } else {
3597        mJpegSettings->jpeg_quality = 85;
3598    }
3599    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3600        mJpegSettings->thumbnail_size.width =
3601            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3602        mJpegSettings->thumbnail_size.height =
3603            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3604    } else {
3605        mJpegSettings->thumbnail_size.width = 0;
3606        mJpegSettings->thumbnail_size.height = 0;
3607    }
3608    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3609        for (int i = 0; i < 3; i++) {
3610            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3611            *(mJpegSettings->gps_coordinates[i]) =
3612                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3613        }
3614    } else{
3615       for (int i = 0; i < 3; i++) {
3616            mJpegSettings->gps_coordinates[i] = NULL;
3617        }
3618    }
3619
3620    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3621        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3622        *(mJpegSettings->gps_timestamp) =
3623            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3624    } else {
3625        mJpegSettings->gps_timestamp = NULL;
3626    }
3627
3628    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3629        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3630        for (int i = 0; i < len; i++) {
3631            mJpegSettings->gps_processing_method[i] =
3632                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3633        }
3634        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3635            mJpegSettings->gps_processing_method[len] = '\0';
3636        }
3637    } else {
3638        mJpegSettings->gps_processing_method[0] = '\0';
3639    }
3640
3641    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3642        mJpegSettings->sensor_sensitivity =
3643            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3644    } else {
3645        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3646    }
3647
3648    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3649
3650    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3651        mJpegSettings->lens_focal_length =
3652            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3653    }
3654    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3655        mJpegSettings->exposure_compensation =
3656            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3657    }
3658    mJpegSettings->sharpness = 10; //default value
3659    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3660        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3661        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3662            mJpegSettings->sharpness = 0;
3663        }
3664    }
3665    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3666    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3667    mJpegSettings->is_jpeg_format = true;
3668    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3669    return 0;
3670}
3671
3672/*===========================================================================
3673 * FUNCTION   : captureResultCb
3674 *
3675 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3676 *
3677 * PARAMETERS :
3678 *   @frame  : frame information from mm-camera-interface
3679 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3680 *   @userdata: userdata
3681 *
3682 * RETURN     : NONE
3683 *==========================================================================*/
3684void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3685                camera3_stream_buffer_t *buffer,
3686                uint32_t frame_number, void *userdata)
3687{
3688    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3689    if (hw == NULL) {
3690        ALOGE("%s: Invalid hw %p", __func__, hw);
3691        return;
3692    }
3693
3694    hw->captureResultCb(metadata, buffer, frame_number);
3695    return;
3696}
3697
3698
3699/*===========================================================================
3700 * FUNCTION   : initialize
3701 *
3702 * DESCRIPTION: Pass framework callback pointers to HAL
3703 *
3704 * PARAMETERS :
3705 *
3706 *
3707 * RETURN     : Success : 0
3708 *              Failure: -ENODEV
3709 *==========================================================================*/
3710
3711int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3712                                  const camera3_callback_ops_t *callback_ops)
3713{
3714    ALOGV("%s: E", __func__);
3715    QCamera3HardwareInterface *hw =
3716        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3717    if (!hw) {
3718        ALOGE("%s: NULL camera device", __func__);
3719        return -ENODEV;
3720    }
3721
3722    int rc = hw->initialize(callback_ops);
3723    ALOGV("%s: X", __func__);
3724    return rc;
3725}
3726
3727/*===========================================================================
3728 * FUNCTION   : configure_streams
3729 *
3730 * DESCRIPTION:
3731 *
3732 * PARAMETERS :
3733 *
3734 *
3735 * RETURN     : Success: 0
3736 *              Failure: -EINVAL (if stream configuration is invalid)
3737 *                       -ENODEV (fatal error)
3738 *==========================================================================*/
3739
3740int QCamera3HardwareInterface::configure_streams(
3741        const struct camera3_device *device,
3742        camera3_stream_configuration_t *stream_list)
3743{
3744    ALOGV("%s: E", __func__);
3745    QCamera3HardwareInterface *hw =
3746        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3747    if (!hw) {
3748        ALOGE("%s: NULL camera device", __func__);
3749        return -ENODEV;
3750    }
3751    int rc = hw->configureStreams(stream_list);
3752    ALOGV("%s: X", __func__);
3753    return rc;
3754}
3755
3756/*===========================================================================
3757 * FUNCTION   : register_stream_buffers
3758 *
3759 * DESCRIPTION: Register stream buffers with the device
3760 *
3761 * PARAMETERS :
3762 *
3763 * RETURN     :
3764 *==========================================================================*/
3765int QCamera3HardwareInterface::register_stream_buffers(
3766        const struct camera3_device *device,
3767        const camera3_stream_buffer_set_t *buffer_set)
3768{
3769    ALOGV("%s: E", __func__);
3770    QCamera3HardwareInterface *hw =
3771        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3772    if (!hw) {
3773        ALOGE("%s: NULL camera device", __func__);
3774        return -ENODEV;
3775    }
3776    int rc = hw->registerStreamBuffers(buffer_set);
3777    ALOGV("%s: X", __func__);
3778    return rc;
3779}
3780
3781/*===========================================================================
3782 * FUNCTION   : construct_default_request_settings
3783 *
3784 * DESCRIPTION: Configure a settings buffer to meet the required use case
3785 *
3786 * PARAMETERS :
3787 *
3788 *
3789 * RETURN     : Success: Return valid metadata
3790 *              Failure: Return NULL
3791 *==========================================================================*/
3792const camera_metadata_t* QCamera3HardwareInterface::
3793    construct_default_request_settings(const struct camera3_device *device,
3794                                        int type)
3795{
3796
3797    ALOGV("%s: E", __func__);
3798    camera_metadata_t* fwk_metadata = NULL;
3799    QCamera3HardwareInterface *hw =
3800        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3801    if (!hw) {
3802        ALOGE("%s: NULL camera device", __func__);
3803        return NULL;
3804    }
3805
3806    fwk_metadata = hw->translateCapabilityToMetadata(type);
3807
3808    ALOGV("%s: X", __func__);
3809    return fwk_metadata;
3810}
3811
3812/*===========================================================================
3813 * FUNCTION   : process_capture_request
3814 *
3815 * DESCRIPTION:
3816 *
3817 * PARAMETERS :
3818 *
3819 *
3820 * RETURN     :
3821 *==========================================================================*/
3822int QCamera3HardwareInterface::process_capture_request(
3823                    const struct camera3_device *device,
3824                    camera3_capture_request_t *request)
3825{
3826    ALOGV("%s: E", __func__);
3827    QCamera3HardwareInterface *hw =
3828        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3829    if (!hw) {
3830        ALOGE("%s: NULL camera device", __func__);
3831        return -EINVAL;
3832    }
3833
3834    int rc = hw->processCaptureRequest(request);
3835    ALOGV("%s: X", __func__);
3836    return rc;
3837}
3838
3839/*===========================================================================
3840 * FUNCTION   : get_metadata_vendor_tag_ops
3841 *
3842 * DESCRIPTION:
3843 *
3844 * PARAMETERS :
3845 *
3846 *
3847 * RETURN     :
3848 *==========================================================================*/
3849
3850void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3851                const struct camera3_device *device,
3852                vendor_tag_query_ops_t* ops)
3853{
3854    ALOGV("%s: E", __func__);
3855    QCamera3HardwareInterface *hw =
3856        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3857    if (!hw) {
3858        ALOGE("%s: NULL camera device", __func__);
3859        return;
3860    }
3861
3862    hw->getMetadataVendorTagOps(ops);
3863    ALOGV("%s: X", __func__);
3864    return;
3865}
3866
3867/*===========================================================================
3868 * FUNCTION   : dump
3869 *
3870 * DESCRIPTION:
3871 *
3872 * PARAMETERS :
3873 *
3874 *
3875 * RETURN     :
3876 *==========================================================================*/
3877
3878void QCamera3HardwareInterface::dump(
3879                const struct camera3_device *device, int fd)
3880{
3881    ALOGV("%s: E", __func__);
3882    QCamera3HardwareInterface *hw =
3883        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3884    if (!hw) {
3885        ALOGE("%s: NULL camera device", __func__);
3886        return;
3887    }
3888
3889    hw->dump(fd);
3890    ALOGV("%s: X", __func__);
3891    return;
3892}
3893
3894/*===========================================================================
3895 * FUNCTION   : flush
3896 *
3897 * DESCRIPTION:
3898 *
3899 * PARAMETERS :
3900 *
3901 *
3902 * RETURN     :
3903 *==========================================================================*/
3904
3905int QCamera3HardwareInterface::flush(
3906                const struct camera3_device *device)
3907{
3908    int rc;
3909    ALOGV("%s: E", __func__);
3910    QCamera3HardwareInterface *hw =
3911        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3912    if (!hw) {
3913        ALOGE("%s: NULL camera device", __func__);
3914        return -EINVAL;
3915    }
3916
3917    rc = hw->flush();
3918    ALOGV("%s: X", __func__);
3919    return rc;
3920}
3921
3922/*===========================================================================
3923 * FUNCTION   : close_camera_device
3924 *
3925 * DESCRIPTION:
3926 *
3927 * PARAMETERS :
3928 *
3929 *
3930 * RETURN     :
3931 *==========================================================================*/
3932int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3933{
3934    ALOGV("%s: E", __func__);
3935    int ret = NO_ERROR;
3936    QCamera3HardwareInterface *hw =
3937        reinterpret_cast<QCamera3HardwareInterface *>(
3938            reinterpret_cast<camera3_device_t *>(device)->priv);
3939    if (!hw) {
3940        ALOGE("NULL camera device");
3941        return BAD_VALUE;
3942    }
3943    delete hw;
3944
3945    pthread_mutex_lock(&mCameraSessionLock);
3946    mCameraSessionActive = 0;
3947    pthread_mutex_unlock(&mCameraSessionLock);
3948    ALOGV("%s: X", __func__);
3949    return ret;
3950}
3951
3952/*===========================================================================
3953 * FUNCTION   : getWaveletDenoiseProcessPlate
3954 *
3955 * DESCRIPTION: query wavelet denoise process plate
3956 *
3957 * PARAMETERS : None
3958 *
3959 * RETURN     : WNR prcocess plate vlaue
3960 *==========================================================================*/
3961cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3962{
3963    char prop[PROPERTY_VALUE_MAX];
3964    memset(prop, 0, sizeof(prop));
3965    property_get("persist.denoise.process.plates", prop, "0");
3966    int processPlate = atoi(prop);
3967    switch(processPlate) {
3968    case 0:
3969        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3970    case 1:
3971        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3972    case 2:
3973        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3974    case 3:
3975        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3976    default:
3977        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3978    }
3979}
3980
3981/*===========================================================================
3982 * FUNCTION   : needRotationReprocess
3983 *
3984 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3985 *
3986 * PARAMETERS : none
3987 *
3988 * RETURN     : true: needed
3989 *              false: no need
3990 *==========================================================================*/
3991bool QCamera3HardwareInterface::needRotationReprocess()
3992{
3993
3994    if (!mJpegSettings->is_jpeg_format) {
3995        // RAW image, no need to reprocess
3996        return false;
3997    }
3998
3999    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4000        mJpegSettings->jpeg_orientation > 0) {
4001        // current rotation is not zero, and pp has the capability to process rotation
4002        ALOGD("%s: need do reprocess for rotation", __func__);
4003        return true;
4004    }
4005
4006    return false;
4007}
4008
4009/*===========================================================================
4010 * FUNCTION   : needReprocess
4011 *
4012 * DESCRIPTION: if reprocess in needed
4013 *
4014 * PARAMETERS : none
4015 *
4016 * RETURN     : true: needed
4017 *              false: no need
4018 *==========================================================================*/
4019bool QCamera3HardwareInterface::needReprocess()
4020{
4021    if (!mJpegSettings->is_jpeg_format) {
4022        // RAW image, no need to reprocess
4023        return false;
4024    }
4025
4026    if ((mJpegSettings->min_required_pp_mask > 0) ||
4027         isWNREnabled()) {
4028        // TODO: add for ZSL HDR later
4029        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4030        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4031        return true;
4032    }
4033    return needRotationReprocess();
4034}
4035
4036/*===========================================================================
4037 * FUNCTION   : addOnlineReprocChannel
4038 *
4039 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4040 *              coming from input channel
4041 *
4042 * PARAMETERS :
4043 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4044 *
4045 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4046 *==========================================================================*/
4047QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4048              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4049{
4050    int32_t rc = NO_ERROR;
4051    QCamera3ReprocessChannel *pChannel = NULL;
4052    if (pInputChannel == NULL) {
4053        ALOGE("%s: input channel obj is NULL", __func__);
4054        return NULL;
4055    }
4056
4057    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4058            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4059    if (NULL == pChannel) {
4060        ALOGE("%s: no mem for reprocess channel", __func__);
4061        return NULL;
4062    }
4063
4064    // Capture channel, only need snapshot and postview streams start together
4065    mm_camera_channel_attr_t attr;
4066    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4067    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4068    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4069    rc = pChannel->initialize();
4070    if (rc != NO_ERROR) {
4071        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4072        delete pChannel;
4073        return NULL;
4074    }
4075
4076    // pp feature config
4077    cam_pp_feature_config_t pp_config;
4078    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4079    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4080        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4081        pp_config.sharpness = mJpegSettings->sharpness;
4082    }
4083
4084    if (isWNREnabled()) {
4085        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4086        pp_config.denoise2d.denoise_enable = 1;
4087        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4088    }
4089    if (needRotationReprocess()) {
4090        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4091        int rotation = mJpegSettings->jpeg_orientation;
4092        if (rotation == 0) {
4093            pp_config.rotation = ROTATE_0;
4094        } else if (rotation == 90) {
4095            pp_config.rotation = ROTATE_90;
4096        } else if (rotation == 180) {
4097            pp_config.rotation = ROTATE_180;
4098        } else if (rotation == 270) {
4099            pp_config.rotation = ROTATE_270;
4100        }
4101    }
4102
4103   rc = pChannel->addReprocStreamsFromSource(pp_config,
4104                                             pInputChannel,
4105                                             mMetadataChannel);
4106
4107    if (rc != NO_ERROR) {
4108        delete pChannel;
4109        return NULL;
4110    }
4111    return pChannel;
4112}
4113
// Maximum number of unmatched frames allowed in a channel queue, taken from
// the camera capability's minimum post-processing buffer count.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4118
// Whether wavelet noise reduction is supported/enabled for this camera,
// as reported by the capability table.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4122
4123}; //end namespace qcamera
4124