// QCamera3HWI.cpp revision d363f94a30706c0f951d037b3601bc2a0b445837
/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Convenience accessor for the backing pointer of a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables; entries are read by the constructor and
// configureStreams(). NOTE(review): assumed to be populated elsewhere
// (e.g. during HAL module init) before any instance is created -- confirm.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
// Cached static metadata handed to the framework, one per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes open/close across all HAL instances; openCamera() uses
// mCameraSessionActive under this lock to reject concurrent sessions.
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Translation tables between Android framework (camera_metadata) enum
// values and the corresponding HAL (cam_*) enum values. Presumably scanned
// linearly by lookup helpers elsewhere in this file -- TODO confirm.

// Framework effect mode -> HAL effect mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Framework AWB mode -> HAL white balance mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Framework scene mode -> HAL scene mode
// (STEADYPHOTO intentionally maps to the HAL's ANTISHAKE mode.)
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Framework AF mode -> HAL focus mode (AF_MODE_OFF maps to FIXED focus).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Framework AE antibanding mode -> HAL antibanding mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Framework AE mode -> HAL flash mode (AE itself stays on; only the flash
// behavior differs, so both OFF and ON map to flash OFF).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Framework manual flash mode -> HAL flash mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Framework face-detect mode -> HAL face-detect mode (no SIMPLE mode entry).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Supported JPEG thumbnail sizes as a flat list of width,height pairs;
// the trailing 0,0 presumably means "no thumbnail" -- confirm against the
// framework's ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES convention.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
// camera3_device_ops vtable handed to the framework via mCameraDevice.ops;
// each entry forwards to the matching static trampoline on
// QCamera3HardwareInterface. (Uses GCC's old-style designated-initializer
// extension for C++.)
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the camera3_device_t that openCamera() hands back to the
    // framework; priv lets the static trampolines recover `this`.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): assumes gCamCapability[cameraId] was populated before
    // construction; a NULL entry here would crash -- confirm the module
    // init path guarantees this.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; mark all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL is optional; a missing module only disables power hints.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if (channel)
240            delete channel;
241        free (*it);
242    }
243
244    mPictureChannel = NULL;
245
246    if (mJpegSettings != NULL) {
247        free(mJpegSettings);
248        mJpegSettings = NULL;
249    }
250
251    /* Clean up all channels */
252    if (mCameraInitialized) {
253        mMetadataChannel->stop();
254        delete mMetadataChannel;
255        mMetadataChannel = NULL;
256        deinitParameters();
257    }
258
259    if (mCameraOpened)
260        closeCamera();
261
262    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
263        if (mDefaultMetadata[i])
264            free_camera_metadata(mDefaultMetadata[i]);
265
266    pthread_cond_destroy(&mRequestCond);
267
268    pthread_mutex_destroy(&mMutex);
269    ALOGV("%s: X", __func__);
270}
271
272/*===========================================================================
273 * FUNCTION   : openCamera
274 *
275 * DESCRIPTION: open camera
276 *
277 * PARAMETERS :
278 *   @hw_device  : double ptr for camera device struct
279 *
280 * RETURN     : int32_t type of status
281 *              NO_ERROR  -- success
282 *              none-zero failure code
283 *==========================================================================*/
284int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
285{
286    int rc = 0;
287    pthread_mutex_lock(&mCameraSessionLock);
288    if (mCameraSessionActive) {
289        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
290        pthread_mutex_unlock(&mCameraSessionLock);
291        return INVALID_OPERATION;
292    }
293
294    if (mCameraOpened) {
295        *hw_device = NULL;
296        return PERMISSION_DENIED;
297    }
298
299    rc = openCamera();
300    if (rc == 0) {
301        *hw_device = &mCameraDevice.common;
302        mCameraSessionActive = 1;
303    } else
304        *hw_device = NULL;
305
306#ifdef HAS_MULTIMEDIA_HINTS
307    if (rc == 0) {
308        if (m_pPowerModule) {
309            if (m_pPowerModule->powerHint) {
310                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
311                        (void *)"state=1");
312            }
313        }
314    }
315#endif
316    pthread_mutex_unlock(&mCameraSessionLock);
317    return rc;
318}
319
320/*===========================================================================
321 * FUNCTION   : openCamera
322 *
323 * DESCRIPTION: open camera
324 *
325 * PARAMETERS : none
326 *
327 * RETURN     : int32_t type of status
328 *              NO_ERROR  -- success
329 *              none-zero failure code
330 *==========================================================================*/
331int QCamera3HardwareInterface::openCamera()
332{
333    if (mCameraHandle) {
334        ALOGE("Failure: Camera already opened");
335        return ALREADY_EXISTS;
336    }
337    mCameraHandle = camera_open(mCameraId);
338    if (!mCameraHandle) {
339        ALOGE("camera_open failed.");
340        return UNKNOWN_ERROR;
341    }
342
343    mCameraOpened = true;
344
345    return NO_ERROR;
346}
347
348/*===========================================================================
349 * FUNCTION   : closeCamera
350 *
351 * DESCRIPTION: close camera
352 *
353 * PARAMETERS : none
354 *
355 * RETURN     : int32_t type of status
356 *              NO_ERROR  -- success
357 *              none-zero failure code
358 *==========================================================================*/
359int QCamera3HardwareInterface::closeCamera()
360{
361    int rc = NO_ERROR;
362
363    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
364    mCameraHandle = NULL;
365    mCameraOpened = false;
366
367#ifdef HAS_MULTIMEDIA_HINTS
368    if (rc == NO_ERROR) {
369        if (m_pPowerModule) {
370            if (m_pPowerModule->powerHint) {
371                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
372                        (void *)"state=0");
373            }
374        }
375    }
376#endif
377
378    return rc;
379}
380
381/*===========================================================================
382 * FUNCTION   : initialize
383 *
384 * DESCRIPTION: Initialize frameworks callback functions
385 *
386 * PARAMETERS :
387 *   @callback_ops : callback function to frameworks
388 *
389 * RETURN     :
390 *
391 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    // Allocate/map the parameter buffer shared with the mm-camera backend.
    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
       goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    // NOTE(review): this NULL check is only meaningful if the build uses
    // -fno-exceptions (operator new otherwise throws) -- confirm.
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    // mCameraInitialized gates the destructor's metadata-channel teardown.
    mCameraInitialized = true;
    return 0;

// Error ladder: unwind in reverse order of construction.
err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
434
435/*===========================================================================
436 * FUNCTION   : configureStreams
437 *
438 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
439 *              and output streams.
440 *
441 * PARAMETERS :
442 *   @stream_list : streams to be configured
443 *
444 * RETURN     :
445 *
446 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    mIsZslMode = false;
    pthread_mutex_lock(&mMutex);
    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    cam_stream_size_info_t stream_config_info;

    /* first invalidate all the steams in the mStreamList
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }

    // Pass 1 over the requested streams: mark known streams RECONFIGURE
    // (their channel is deleted and rebuilt below), record brand-new ones,
    // and locate the input and jpeg streams.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            // NOTE(review): malloc return value is not checked -- a failed
            // allocation would crash on the next line; confirm acceptable.
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            // camera3 allows at most one input-capable stream per config.
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    // Streams that did not reappear in this configuration are torn down
    // together with their channel, buffer table, and bookkeeping entry.
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    //mMetadataChannel->stop();

    /* Allocate channel objects for the requested streams */
    // Pass 2: record per-stream sizes for the backend and build a channel
    // for every stream that does not have one yet.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        stream_config_info.stream_sizes[i].width = newStream->width;
        stream_config_info.stream_sizes[i].height = newStream->height;
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
            //for zsl stream the size is jpeg size
            stream_config_info.stream_sizes[i].width = jpegStream->width;
            stream_config_info.stream_sizes[i].height = jpegStream->height;
        }
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // Pick gralloc usage flags from the stream direction.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage =
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else
                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        // ZSL: the bidirectional stream is backed by
                        // jpeg-sized buffers.
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    // JPEG capture stream gets the dedicated picture channel.
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework wont*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        // Reset the pending-buffer count for every surviving stream.
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();

    /*flush the metadata list*/
    // NOTE(review): this loop both assigns m = erase(m) and increments m,
    // so it skips every other entry -- same pattern as the destructor;
    // confirm whether this is intended.
    if (!mStoredMetadataList.empty()) {
        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
              m != mStoredMetadataList.end(); m++) {
            mMetadataChannel->bufDone(m->meta_buf);
            free(m->meta_buf);
            m = mStoredMetadataList.erase(m);
        }
    }
    int32_t hal_version = CAM_HAL_V3;
    stream_config_info.num_streams = streamList->num_streams;

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));

    // Push HAL version + the collected stream geometry to the backend.
    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);

    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
                sizeof(stream_config_info), &stream_config_info);

    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);

    // The next process_capture_request() must do first-frame setup.
    mFirstRequest = true;

    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}
698
699/*===========================================================================
700 * FUNCTION   : validateCaptureRequest
701 *
702 * DESCRIPTION: validate a capture request from camera service
703 *
704 * PARAMETERS :
705 *   @request : request from framework to process
706 *
707 * RETURN     :
708 *
709 *==========================================================================*/
710int QCamera3HardwareInterface::validateCaptureRequest(
711                    camera3_capture_request_t *request)
712{
713    ssize_t idx = 0;
714    const camera3_stream_buffer_t *b;
715    CameraMetadata meta;
716
717    /* Sanity check the request */
718    if (request == NULL) {
719        ALOGE("%s: NULL capture request", __func__);
720        return BAD_VALUE;
721    }
722
723    uint32_t frameNumber = request->frame_number;
724    if (request->input_buffer != NULL &&
725            request->input_buffer->stream != mInputStream) {
726        ALOGE("%s: Request %d: Input buffer not from input stream!",
727                __FUNCTION__, frameNumber);
728        return BAD_VALUE;
729    }
730    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
731        ALOGE("%s: Request %d: No output buffers provided!",
732                __FUNCTION__, frameNumber);
733        return BAD_VALUE;
734    }
735    if (request->input_buffer != NULL) {
736        b = request->input_buffer;
737        QCamera3Channel *channel =
738            static_cast<QCamera3Channel*>(b->stream->priv);
739        if (channel == NULL) {
740            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
741                    __func__, frameNumber, idx);
742            return BAD_VALUE;
743        }
744        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
745            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
746                    __func__, frameNumber, idx);
747            return BAD_VALUE;
748        }
749        if (b->release_fence != -1) {
750            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
751                    __func__, frameNumber, idx);
752            return BAD_VALUE;
753        }
754        if (b->buffer == NULL) {
755            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
756                    __func__, frameNumber, idx);
757            return BAD_VALUE;
758        }
759    }
760
761    // Validate all buffers
762    b = request->output_buffers;
763    do {
764        QCamera3Channel *channel =
765                static_cast<QCamera3Channel*>(b->stream->priv);
766        if (channel == NULL) {
767            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
768                    __func__, frameNumber, idx);
769            return BAD_VALUE;
770        }
771        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
772            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
773                    __func__, frameNumber, idx);
774            return BAD_VALUE;
775        }
776        if (b->release_fence != -1) {
777            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
778                    __func__, frameNumber, idx);
779            return BAD_VALUE;
780        }
781        if (b->buffer == NULL) {
782            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
783                    __func__, frameNumber, idx);
784            return BAD_VALUE;
785        }
786        idx++;
787        b = request->output_buffers + idx;
788    } while (idx < (ssize_t)request->num_output_buffers);
789
790    return NO_ERROR;
791}
792
793/*===========================================================================
794 * FUNCTION   : deriveMinFrameDuration
795 *
796 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
797 *              on currently configured streams.
798 *
799 * PARAMETERS : NONE
800 *
801 * RETURN     : NONE
802 *
803 *==========================================================================*/
804void QCamera3HardwareInterface::deriveMinFrameDuration()
805{
806    int32_t maxJpegDimension, maxProcessedDimension;
807
808    maxJpegDimension = 0;
809    maxProcessedDimension = 0;
810
811    // Figure out maximum jpeg, processed, and raw dimensions
812    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
813        it != mStreamInfo.end(); it++) {
814
815        // Input stream doesn't have valid stream_type
816        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
817            continue;
818
819        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
820        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
821            if (dimension > maxJpegDimension)
822                maxJpegDimension = dimension;
823        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
824            if (dimension > maxProcessedDimension)
825                maxProcessedDimension = dimension;
826        }
827    }
828
829    //Assume all jpeg dimensions are in processed dimensions.
830    if (maxJpegDimension > maxProcessedDimension)
831        maxProcessedDimension = maxJpegDimension;
832
833    //Find minimum durations for processed, jpeg, and raw
834    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
835    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
836        if (maxProcessedDimension ==
837            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
838            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
839            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
840            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
841            break;
842        }
843    }
844}
845
846/*===========================================================================
847 * FUNCTION   : getMinFrameDuration
848 *
849 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
850 *              and current request configuration.
851 *
852 * PARAMETERS : @request: requset sent by the frameworks
853 *
854 * RETURN     : min farme duration for a particular request
855 *
856 *==========================================================================*/
857int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
858{
859    bool hasJpegStream = false;
860    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
861        const camera3_stream_t *stream = request->output_buffers[i].stream;
862        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
863            hasJpegStream = true;
864    }
865
866    if (!hasJpegStream)
867        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
868    else
869        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
870}
871
872/*===========================================================================
873 * FUNCTION   : registerStreamBuffers
874 *
875 * DESCRIPTION: Register buffers for a given stream with the HAL device.
876 *
877 * PARAMETERS :
878 *   @stream_list : streams to be configured
879 *
880 * RETURN     :
881 *
882 *==========================================================================*/
883int QCamera3HardwareInterface::registerStreamBuffers(
884        const camera3_stream_buffer_set_t *buffer_set)
885{
886    int rc = 0;
887
888    pthread_mutex_lock(&mMutex);
889
890    if (buffer_set == NULL) {
891        ALOGE("%s: Invalid buffer_set parameter.", __func__);
892        pthread_mutex_unlock(&mMutex);
893        return -EINVAL;
894    }
895    if (buffer_set->stream == NULL) {
896        ALOGE("%s: Invalid stream parameter.", __func__);
897        pthread_mutex_unlock(&mMutex);
898        return -EINVAL;
899    }
900    if (buffer_set->num_buffers < 1) {
901        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
902        pthread_mutex_unlock(&mMutex);
903        return -EINVAL;
904    }
905    if (buffer_set->buffers == NULL) {
906        ALOGE("%s: Invalid buffers parameter.", __func__);
907        pthread_mutex_unlock(&mMutex);
908        return -EINVAL;
909    }
910
911    camera3_stream_t *stream = buffer_set->stream;
912    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
913
914    //set the buffer_set in the mStreamInfo array
915    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
916            it != mStreamInfo.end(); it++) {
917        if ((*it)->stream == stream) {
918            uint32_t numBuffers = buffer_set->num_buffers;
919            (*it)->buffer_set.stream = buffer_set->stream;
920            (*it)->buffer_set.num_buffers = numBuffers;
921            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
922            if ((*it)->buffer_set.buffers == NULL) {
923                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
924                pthread_mutex_unlock(&mMutex);
925                return -ENOMEM;
926            }
927            for (size_t j = 0; j < numBuffers; j++){
928                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
929            }
930            (*it)->registered = 1;
931        }
932    }
933    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
934    if (rc < 0) {
935        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
936        pthread_mutex_unlock(&mMutex);
937        return -ENODEV;
938    }
939
940    pthread_mutex_unlock(&mMutex);
941    return NO_ERROR;
942}
943
944/*===========================================================================
945 * FUNCTION   : processCaptureRequest
946 *
947 * DESCRIPTION: process a capture request from camera service
948 *
949 * PARAMETERS :
950 *   @request : request from framework to process
951 *
952 * RETURN     :
953 *
954 *==========================================================================*/
955int QCamera3HardwareInterface::processCaptureRequest(
956                    camera3_capture_request_t *request)
957{
958    int rc = NO_ERROR;
959    int32_t request_id;
960    CameraMetadata meta;
961    MetadataBufferInfo reproc_meta;
962    int queueMetadata = 0;
963
964    pthread_mutex_lock(&mMutex);
965
966    rc = validateCaptureRequest(request);
967    if (rc != NO_ERROR) {
968        ALOGE("%s: incoming request is not valid", __func__);
969        pthread_mutex_unlock(&mMutex);
970        return rc;
971    }
972
973    meta = request->settings;
974
975    // For first capture request, send capture intent, and
976    // stream on all streams
977    if (mFirstRequest) {
978
979        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
980            int32_t hal_version = CAM_HAL_V3;
981            uint8_t captureIntent =
982                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
983
984            memset(mParameters, 0, sizeof(parm_buffer_t));
985            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
986            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
987                sizeof(hal_version), &hal_version);
988            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
989                sizeof(captureIntent), &captureIntent);
990            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
991                mParameters);
992        }
993
994        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
995            it != mStreamInfo.end(); it++) {
996            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
997            channel->start();
998        }
999    }
1000
1001    uint32_t frameNumber = request->frame_number;
1002    uint32_t streamTypeMask = 0;
1003
1004    if (meta.exists(ANDROID_REQUEST_ID)) {
1005        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1006        mCurrentRequestId = request_id;
1007        ALOGV("%s: Received request with id: %d",__func__, request_id);
1008    } else if (mFirstRequest || mCurrentRequestId == -1){
1009        ALOGE("%s: Unable to find request id field, \
1010                & no previous id available", __func__);
1011        return NAME_NOT_FOUND;
1012    } else {
1013        ALOGV("%s: Re-using old request id", __func__);
1014        request_id = mCurrentRequestId;
1015    }
1016
1017    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1018                                    __func__, __LINE__,
1019                                    request->num_output_buffers,
1020                                    request->input_buffer,
1021                                    frameNumber);
1022    // Acquire all request buffers first
1023    int blob_request = 0;
1024    for (size_t i = 0; i < request->num_output_buffers; i++) {
1025        const camera3_stream_buffer_t& output = request->output_buffers[i];
1026        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1027        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1028
1029        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1030        //Call function to store local copy of jpeg data for encode params.
1031            blob_request = 1;
1032            rc = getJpegSettings(request->settings);
1033            if (rc < 0) {
1034                ALOGE("%s: failed to get jpeg parameters", __func__);
1035                pthread_mutex_unlock(&mMutex);
1036                return rc;
1037            }
1038        }
1039
1040        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1041        if (rc != OK) {
1042            ALOGE("%s: fence wait failed %d", __func__, rc);
1043            pthread_mutex_unlock(&mMutex);
1044            return rc;
1045        }
1046        streamTypeMask |= channel->getStreamTypeMask();
1047    }
1048
1049    rc = setFrameParameters(request, streamTypeMask);
1050    if (rc < 0) {
1051        ALOGE("%s: fail to set frame parameters", __func__);
1052        pthread_mutex_unlock(&mMutex);
1053        return rc;
1054    }
1055
1056    /* Update pending request list and pending buffers map */
1057    PendingRequestInfo pendingRequest;
1058    pendingRequest.frame_number = frameNumber;
1059    pendingRequest.num_buffers = request->num_output_buffers;
1060    pendingRequest.request_id = request_id;
1061    pendingRequest.blob_request = blob_request;
1062    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1063
1064    for (size_t i = 0; i < request->num_output_buffers; i++) {
1065        RequestedBufferInfo requestedBuf;
1066        requestedBuf.stream = request->output_buffers[i].stream;
1067        requestedBuf.buffer = NULL;
1068        pendingRequest.buffers.push_back(requestedBuf);
1069
1070        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1071    }
1072    mPendingRequestsList.push_back(pendingRequest);
1073
1074    // Notify metadata channel we receive a request
1075    mMetadataChannel->request(NULL, frameNumber);
1076
1077    // Call request on other streams
1078    for (size_t i = 0; i < request->num_output_buffers; i++) {
1079        const camera3_stream_buffer_t& output = request->output_buffers[i];
1080        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1081        mm_camera_buf_def_t *pInputBuffer = NULL;
1082
1083        if (channel == NULL) {
1084            ALOGE("%s: invalid channel pointer for stream", __func__);
1085            continue;
1086        }
1087
1088        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1089            QCamera3RegularChannel* inputChannel = NULL;
1090            if(request->input_buffer != NULL){
1091                //Try to get the internal format
1092                inputChannel = (QCamera3RegularChannel*)
1093                    request->input_buffer->stream->priv;
1094                if(inputChannel == NULL ){
1095                    ALOGE("%s: failed to get input channel handle", __func__);
1096                } else {
1097                    pInputBuffer =
1098                        inputChannel->getInternalFormatBuffer(
1099                                request->input_buffer->buffer);
1100                    ALOGD("%s: Input buffer dump",__func__);
1101                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1102                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1103                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1104                    ALOGD("Handle:%p", request->input_buffer->buffer);
1105                    //TODO: need to get corresponding metadata and send it to pproc
1106                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1107                         m != mStoredMetadataList.end(); m++) {
1108                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1109                            reproc_meta.meta_buf = m->meta_buf;
1110                            queueMetadata = 1;
1111                            break;
1112                        }
1113                    }
1114                }
1115            }
1116            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1117                            pInputBuffer,(QCamera3Channel*)inputChannel);
1118            if (queueMetadata) {
1119                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1120            }
1121        } else {
1122            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1123                __LINE__, output.buffer, frameNumber);
1124            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1125                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1126                     m != mStoredMetadataList.end(); m++) {
1127                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1128                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1129                            mMetadataChannel->bufDone(m->meta_buf);
1130                            free(m->meta_buf);
1131                            m = mStoredMetadataList.erase(m);
1132                            break;
1133                        }
1134                   }
1135                }
1136            }
1137            rc = channel->request(output.buffer, frameNumber);
1138        }
1139        if (rc < 0)
1140            ALOGE("%s: request failed", __func__);
1141    }
1142
1143    mFirstRequest = false;
1144
1145    //Block on conditional variable
1146    mPendingRequest = 1;
1147    while (mPendingRequest == 1) {
1148        pthread_cond_wait(&mRequestCond, &mMutex);
1149    }
1150
1151    pthread_mutex_unlock(&mMutex);
1152    return rc;
1153}
1154
1155/*===========================================================================
1156 * FUNCTION   : getMetadataVendorTagOps
1157 *
1158 * DESCRIPTION:
1159 *
1160 * PARAMETERS :
1161 *
1162 *
1163 * RETURN     :
1164 *==========================================================================*/
1165void QCamera3HardwareInterface::getMetadataVendorTagOps(
1166                    vendor_tag_query_ops_t* /*ops*/)
1167{
1168    /* Enable locks when we eventually add Vendor Tags */
1169    /*
1170    pthread_mutex_lock(&mMutex);
1171
1172    pthread_mutex_unlock(&mMutex);
1173    */
1174    return;
1175}
1176
1177/*===========================================================================
1178 * FUNCTION   : dump
1179 *
1180 * DESCRIPTION:
1181 *
1182 * PARAMETERS :
1183 *
1184 *
1185 * RETURN     :
1186 *==========================================================================*/
1187void QCamera3HardwareInterface::dump(int /*fd*/)
1188{
1189    /*Enable lock when we implement this function*/
1190    /*
1191    pthread_mutex_lock(&mMutex);
1192
1193    pthread_mutex_unlock(&mMutex);
1194    */
1195    return;
1196}
1197
1198/*===========================================================================
1199 * FUNCTION   : flush
1200 *
1201 * DESCRIPTION:
1202 *
1203 * PARAMETERS :
1204 *
1205 *
1206 * RETURN     :
1207 *==========================================================================*/
1208int QCamera3HardwareInterface::flush()
1209{
1210    /*Enable lock when we implement this function*/
1211    /*
1212    pthread_mutex_lock(&mMutex);
1213
1214    pthread_mutex_unlock(&mMutex);
1215    */
1216    return 0;
1217}
1218
1219/*===========================================================================
1220 * FUNCTION   : captureResultCb
1221 *
1222 * DESCRIPTION: Callback handler for all capture result
1223 *              (streams, as well as metadata)
1224 *
1225 * PARAMETERS :
1226 *   @metadata : metadata information
1227 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1228 *               NULL if metadata.
1229 *
1230 * RETURN     : NONE
1231 *==========================================================================*/
1232void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1233                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1234{
1235    pthread_mutex_lock(&mMutex);
1236
1237    if (metadata_buf) {
1238        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1239        int32_t frame_number_valid = *(int32_t *)
1240            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1241        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1242            CAM_INTF_META_PENDING_REQUESTS, metadata);
1243        uint32_t frame_number = *(uint32_t *)
1244            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1245        const struct timeval *tv = (const struct timeval *)
1246            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1247        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1248            tv->tv_usec * NSEC_PER_USEC;
1249
1250        if (!frame_number_valid) {
1251            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1252            mMetadataChannel->bufDone(metadata_buf);
1253            goto done_metadata;
1254        }
1255        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1256                frame_number, capture_time);
1257
1258        // Go through the pending requests info and send shutter/results to frameworks
1259        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1260                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1261            camera3_capture_result_t result;
1262            camera3_notify_msg_t notify_msg;
1263            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1264
1265            // Flush out all entries with less or equal frame numbers.
1266
1267            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1268            //Right now it's the same as metadata timestamp
1269
1270            //TODO: When there is metadata drop, how do we derive the timestamp of
1271            //dropped frames? For now, we fake the dropped timestamp by substracting
1272            //from the reported timestamp
1273            nsecs_t current_capture_time = capture_time -
1274                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1275
1276            // Send shutter notify to frameworks
1277            notify_msg.type = CAMERA3_MSG_SHUTTER;
1278            notify_msg.message.shutter.frame_number = i->frame_number;
1279            notify_msg.message.shutter.timestamp = current_capture_time;
1280            mCallbackOps->notify(mCallbackOps, &notify_msg);
1281            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1282                    i->frame_number, capture_time);
1283
1284            // Send empty metadata with already filled buffers for dropped metadata
1285            // and send valid metadata with already filled buffers for current metadata
1286            if (i->frame_number < frame_number) {
1287                CameraMetadata dummyMetadata;
1288                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1289                        &current_capture_time, 1);
1290                dummyMetadata.update(ANDROID_REQUEST_ID,
1291                        &(i->request_id), 1);
1292                result.result = dummyMetadata.release();
1293            } else {
1294                result.result = translateCbMetadataToResultMetadata(metadata,
1295                        current_capture_time, i->request_id);
1296                if (mIsZslMode) {
1297                   int found_metadata = 0;
1298                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1299                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1300                        j != i->buffers.end(); j++) {
1301                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1302                         //check if corresp. zsl already exists in the stored metadata list
1303                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1304                               m != mStoredMetadataList.begin(); m++) {
1305                            if (m->frame_number == frame_number) {
1306                               m->meta_buf = metadata_buf;
1307                               found_metadata = 1;
1308                               break;
1309                            }
1310                         }
1311                         if (!found_metadata) {
1312                            MetadataBufferInfo store_meta_info;
1313                            store_meta_info.meta_buf = metadata_buf;
1314                            store_meta_info.frame_number = frame_number;
1315                            mStoredMetadataList.push_back(store_meta_info);
1316                            found_metadata = 1;
1317                         }
1318                      }
1319                   }
1320                   if (!found_metadata) {
1321                       if (!i->input_buffer_present && i->blob_request) {
1322                          //livesnapshot or fallback non-zsl snapshot case
1323                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1324                                j != i->buffers.end(); j++){
1325                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1326                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1327                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1328                                 break;
1329                              }
1330                         }
1331                       } else {
1332                            //return the metadata immediately
1333                            mMetadataChannel->bufDone(metadata_buf);
1334                            free(metadata_buf);
1335                       }
1336                   }
1337               } else if (!mIsZslMode && i->blob_request) {
1338                   //If it is a blob request then send the metadata to the picture channel
1339                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1340               } else {
1341                   // Return metadata buffer
1342                   mMetadataChannel->bufDone(metadata_buf);
1343                   free(metadata_buf);
1344               }
1345
1346            }
1347            if (!result.result) {
1348                ALOGE("%s: metadata is NULL", __func__);
1349            }
1350            result.frame_number = i->frame_number;
1351            result.num_output_buffers = 0;
1352            result.output_buffers = NULL;
1353            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1354                    j != i->buffers.end(); j++) {
1355                if (j->buffer) {
1356                    result.num_output_buffers++;
1357                }
1358            }
1359
1360            if (result.num_output_buffers > 0) {
1361                camera3_stream_buffer_t *result_buffers =
1362                    new camera3_stream_buffer_t[result.num_output_buffers];
1363                if (!result_buffers) {
1364                    ALOGE("%s: Fatal error: out of memory", __func__);
1365                }
1366                size_t result_buffers_idx = 0;
1367                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1368                        j != i->buffers.end(); j++) {
1369                    if (j->buffer) {
1370                        result_buffers[result_buffers_idx++] = *(j->buffer);
1371                        free(j->buffer);
1372                        j->buffer = NULL;
1373                        mPendingBuffersMap.editValueFor(j->stream)--;
1374                    }
1375                }
1376                result.output_buffers = result_buffers;
1377
1378                mCallbackOps->process_capture_result(mCallbackOps, &result);
1379                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1380                        __func__, result.frame_number, current_capture_time);
1381                free_camera_metadata((camera_metadata_t *)result.result);
1382                delete[] result_buffers;
1383            } else {
1384                mCallbackOps->process_capture_result(mCallbackOps, &result);
1385                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1386                        __func__, result.frame_number, current_capture_time);
1387                free_camera_metadata((camera_metadata_t *)result.result);
1388            }
1389            // erase the element from the list
1390            i = mPendingRequestsList.erase(i);
1391        }
1392
1393
1394done_metadata:
1395        bool max_buffers_dequeued = false;
1396        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1397            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1398            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1399            if (queued_buffers == stream->max_buffers) {
1400                max_buffers_dequeued = true;
1401                break;
1402            }
1403        }
1404        if (!max_buffers_dequeued && !pending_requests) {
1405            // Unblock process_capture_request
1406            mPendingRequest = 0;
1407            pthread_cond_signal(&mRequestCond);
1408        }
1409    } else {
1410        // If the frame number doesn't exist in the pending request list,
1411        // directly send the buffer to the frameworks, and update pending buffers map
1412        // Otherwise, book-keep the buffer.
1413        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1414        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1415            i++;
1416        }
1417        if (i == mPendingRequestsList.end()) {
1418            // Verify all pending requests frame_numbers are greater
1419            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1420                    j != mPendingRequestsList.end(); j++) {
1421                if (j->frame_number < frame_number) {
1422                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1423                            __func__, j->frame_number, frame_number);
1424                }
1425            }
1426            camera3_capture_result_t result;
1427            result.result = NULL;
1428            result.frame_number = frame_number;
1429            result.num_output_buffers = 1;
1430            result.output_buffers = buffer;
1431            ALOGV("%s: result frame_number = %d, buffer = %p",
1432                    __func__, frame_number, buffer);
1433            mPendingBuffersMap.editValueFor(buffer->stream)--;
1434            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1435                int found = 0;
1436                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1437                      k != mStoredMetadataList.end(); k++) {
1438                    if (k->frame_number == frame_number) {
1439                        k->zsl_buf_hdl = buffer->buffer;
1440                        found = 1;
1441                        break;
1442                    }
1443                }
1444                if (!found) {
1445                   MetadataBufferInfo meta_info;
1446                   meta_info.frame_number = frame_number;
1447                   meta_info.zsl_buf_hdl = buffer->buffer;
1448                   mStoredMetadataList.push_back(meta_info);
1449                }
1450            }
1451            mCallbackOps->process_capture_result(mCallbackOps, &result);
1452        } else {
1453            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1454                    j != i->buffers.end(); j++) {
1455                if (j->stream == buffer->stream) {
1456                    if (j->buffer != NULL) {
1457                        ALOGE("%s: Error: buffer is already set", __func__);
1458                    } else {
1459                        j->buffer = (camera3_stream_buffer_t *)malloc(
1460                                sizeof(camera3_stream_buffer_t));
1461                        *(j->buffer) = *buffer;
1462                        ALOGV("%s: cache buffer %p at result frame_number %d",
1463                                __func__, buffer, frame_number);
1464                    }
1465                }
1466            }
1467        }
1468    }
1469    pthread_mutex_unlock(&mMutex);
1470    return;
1471}
1472
1473/*===========================================================================
1474 * FUNCTION   : translateCbMetadataToResultMetadata
1475 *
1476 * DESCRIPTION:
1477 *
1478 * PARAMETERS :
1479 *   @metadata : metadata information from callback
1480 *
1481 * RETURN     : camera_metadata_t*
1482 *              metadata in a format specified by fwk
1483 *==========================================================================*/
1484camera_metadata_t*
1485QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1486                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1487                                 int32_t request_id)
1488{
1489    CameraMetadata camMetadata;
1490    camera_metadata_t* resultMetadata;
1491
1492    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1493    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1494
1495    /*CAM_INTF_META_HISTOGRAM - TODO*/
1496    /*cam_hist_stats_t  *histogram =
1497      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1498      metadata);*/
1499
1500    /*face detection*/
1501    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1502        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1503    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1504    int32_t faceIds[numFaces];
1505    uint8_t faceScores[numFaces];
1506    int32_t faceRectangles[numFaces * 4];
1507    int32_t faceLandmarks[numFaces * 6];
1508    int j = 0, k = 0;
1509    for (int i = 0; i < numFaces; i++) {
1510        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1511        faceScores[i] = faceDetectionInfo->faces[i].score;
1512        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1513                faceRectangles+j, -1);
1514        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1515        j+= 4;
1516        k+= 6;
1517    }
1518    if (numFaces > 0) {
1519        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1520        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1521        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1522            faceRectangles, numFaces*4);
1523        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1524            faceLandmarks, numFaces*6);
1525    }
1526
1527    uint8_t  *color_correct_mode =
1528        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1529    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1530
1531    int32_t  *ae_precapture_id =
1532        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1533    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1534
1535    /*aec regions*/
1536    cam_area_t  *hAeRegions =
1537        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1538    int32_t aeRegions[5];
1539    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1540    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1541
1542    uint8_t *ae_state =
1543            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1544    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1545
1546    uint8_t  *focusMode =
1547        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1548    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1549
1550    /*af regions*/
1551    cam_area_t  *hAfRegions =
1552        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1553    int32_t afRegions[5];
1554    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1555    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1556
1557    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1558    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1559
1560    int32_t  *afTriggerId =
1561        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1562    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1563
1564    uint8_t  *whiteBalance =
1565        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1566    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1567
1568    /*awb regions*/
1569    cam_area_t  *hAwbRegions =
1570        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1571    int32_t awbRegions[5];
1572    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1573    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1574
1575    uint8_t  *whiteBalanceState =
1576        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1577    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1578
1579    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1580    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1581
1582    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1583    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1584
1585    uint8_t  *flashPower =
1586        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1587    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1588
1589    int64_t  *flashFiringTime =
1590        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1591    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1592
1593    /*int32_t  *ledMode =
1594      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1595      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1596
1597    uint8_t  *flashState =
1598        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1599    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1600
1601    uint8_t  *hotPixelMode =
1602        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1603    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1604
1605    float  *lensAperture =
1606        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1607    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1608
1609    float  *filterDensity =
1610        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1611    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1612
1613    float  *focalLength =
1614        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1615    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1616
1617    float  *focusDistance =
1618        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1619    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1620
1621    float  *focusRange =
1622        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1623    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1624
1625    uint8_t  *opticalStab =
1626        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1627    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1628
1629    /*int32_t  *focusState =
1630      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1631      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1632
1633    uint8_t  *noiseRedMode =
1634        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1635    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1636
1637    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1638
1639    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1640        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1641    int32_t scalerCropRegion[4];
1642    scalerCropRegion[0] = hScalerCropRegion->left;
1643    scalerCropRegion[1] = hScalerCropRegion->top;
1644    scalerCropRegion[2] = hScalerCropRegion->width;
1645    scalerCropRegion[3] = hScalerCropRegion->height;
1646    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1647
1648    int64_t  *sensorExpTime =
1649        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1650    mMetadataResponse.exposure_time = *sensorExpTime;
1651    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1652    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1653
1654    int64_t  *sensorFameDuration =
1655        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1656    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1657    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1658
1659    int32_t  *sensorSensitivity =
1660        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1661    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1662    mMetadataResponse.iso_speed = *sensorSensitivity;
1663    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1664
1665    uint8_t  *shadingMode =
1666        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1667    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1668
1669    uint8_t  *faceDetectMode =
1670        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1671    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1672        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1673        *faceDetectMode);
1674    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1675
1676    uint8_t  *histogramMode =
1677        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1678    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1679
1680    uint8_t  *sharpnessMapMode =
1681        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1682    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1683            sharpnessMapMode, 1);
1684
1685    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1686    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1687        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1688    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1689            (int32_t*)sharpnessMap->sharpness,
1690            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1691
1692    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1693        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1694    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1695    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1696    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1697                       (float*)lensShadingMap->lens_shading,
1698                       4*map_width*map_height);
1699
1700    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1701        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1702    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1703
1704    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1705        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1706    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1707                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1708
1709    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1710        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1711    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1712                       predColorCorrectionGains->gains, 4);
1713
1714    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1715        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1716    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1717                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1718
1719    uint8_t *blackLevelLock = (uint8_t*)
1720        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1721    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1722
1723    uint8_t *sceneFlicker = (uint8_t*)
1724        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1725    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1726
1727
1728    resultMetadata = camMetadata.release();
1729    return resultMetadata;
1730}
1731
1732/*===========================================================================
1733 * FUNCTION   : convertToRegions
1734 *
1735 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1736 *
1737 * PARAMETERS :
1738 *   @rect   : cam_rect_t struct to convert
1739 *   @region : int32_t destination array
1740 *   @weight : if we are converting from cam_area_t, weight is valid
1741 *             else weight = -1
1742 *
1743 *==========================================================================*/
1744void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1745    region[0] = rect.left;
1746    region[1] = rect.top;
1747    region[2] = rect.left + rect.width;
1748    region[3] = rect.top + rect.height;
1749    if (weight > -1) {
1750        region[4] = weight;
1751    }
1752}
1753
1754/*===========================================================================
1755 * FUNCTION   : convertFromRegions
1756 *
1757 * DESCRIPTION: helper method to convert from array to cam_rect_t
1758 *
1759 * PARAMETERS :
1760 *   @rect   : cam_rect_t struct to convert
1761 *   @region : int32_t destination array
1762 *   @weight : if we are converting from cam_area_t, weight is valid
1763 *             else weight = -1
1764 *
1765 *==========================================================================*/
1766void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1767                                                   const camera_metadata_t *settings,
1768                                                   uint32_t tag){
1769    CameraMetadata frame_settings;
1770    frame_settings = settings;
1771    int32_t x_min = frame_settings.find(tag).data.i32[0];
1772    int32_t y_min = frame_settings.find(tag).data.i32[1];
1773    int32_t x_max = frame_settings.find(tag).data.i32[2];
1774    int32_t y_max = frame_settings.find(tag).data.i32[3];
1775    roi->weight = frame_settings.find(tag).data.i32[4];
1776    roi->rect.left = x_min;
1777    roi->rect.top = y_min;
1778    roi->rect.width = x_max - x_min;
1779    roi->rect.height = y_max - y_min;
1780}
1781
1782/*===========================================================================
1783 * FUNCTION   : resetIfNeededROI
1784 *
1785 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1786 *              crop region
1787 *
1788 * PARAMETERS :
1789 *   @roi       : cam_area_t struct to resize
1790 *   @scalerCropRegion : cam_crop_region_t region to compare against
1791 *
1792 *
1793 *==========================================================================*/
1794bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1795                                                 const cam_crop_region_t* scalerCropRegion)
1796{
1797    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1798    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1799    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1800    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1801    if ((roi_x_max < scalerCropRegion->left) ||
1802        (roi_y_max < scalerCropRegion->top)  ||
1803        (roi->rect.left > crop_x_max) ||
1804        (roi->rect.top > crop_y_max)){
1805        return false;
1806    }
1807    if (roi->rect.left < scalerCropRegion->left) {
1808        roi->rect.left = scalerCropRegion->left;
1809    }
1810    if (roi->rect.top < scalerCropRegion->top) {
1811        roi->rect.top = scalerCropRegion->top;
1812    }
1813    if (roi_x_max > crop_x_max) {
1814        roi_x_max = crop_x_max;
1815    }
1816    if (roi_y_max > crop_y_max) {
1817        roi_y_max = crop_y_max;
1818    }
1819    roi->rect.width = roi_x_max - roi->rect.left;
1820    roi->rect.height = roi_y_max - roi->rect.top;
1821    return true;
1822}
1823
1824/*===========================================================================
1825 * FUNCTION   : convertLandmarks
1826 *
1827 * DESCRIPTION: helper method to extract the landmarks from face detection info
1828 *
1829 * PARAMETERS :
1830 *   @face   : cam_rect_t struct to convert
1831 *   @landmarks : int32_t destination array
1832 *
1833 *
1834 *==========================================================================*/
1835void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1836{
1837    landmarks[0] = face.left_eye_center.x;
1838    landmarks[1] = face.left_eye_center.y;
1839    landmarks[2] = face.right_eye_center.y;
1840    landmarks[3] = face.right_eye_center.y;
1841    landmarks[4] = face.mouth_center.x;
1842    landmarks[5] = face.mouth_center.y;
1843}
1844
1845#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1846/*===========================================================================
1847 * FUNCTION   : initCapabilities
1848 *
1849 * DESCRIPTION: initialize camera capabilities in static data struct
1850 *
1851 * PARAMETERS :
1852 *   @cameraId  : camera Id
1853 *
1854 * RETURN     : int32_t type of status
1855 *              NO_ERROR  -- success
1856 *              none-zero failure code
1857 *==========================================================================*/
1858int QCamera3HardwareInterface::initCapabilities(int cameraId)
1859{
1860    int rc = 0;
1861    mm_camera_vtbl_t *cameraHandle = NULL;
1862    QCamera3HeapMemory *capabilityHeap = NULL;
1863
1864    cameraHandle = camera_open(cameraId);
1865    if (!cameraHandle) {
1866        ALOGE("%s: camera_open failed", __func__);
1867        rc = -1;
1868        goto open_failed;
1869    }
1870
1871    capabilityHeap = new QCamera3HeapMemory();
1872    if (capabilityHeap == NULL) {
1873        ALOGE("%s: creation of capabilityHeap failed", __func__);
1874        goto heap_creation_failed;
1875    }
1876    /* Allocate memory for capability buffer */
1877    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1878    if(rc != OK) {
1879        ALOGE("%s: No memory for cappability", __func__);
1880        goto allocate_failed;
1881    }
1882
1883    /* Map memory for capability buffer */
1884    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1885    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1886                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1887                                capabilityHeap->getFd(0),
1888                                sizeof(cam_capability_t));
1889    if(rc < 0) {
1890        ALOGE("%s: failed to map capability buffer", __func__);
1891        goto map_failed;
1892    }
1893
1894    /* Query Capability */
1895    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1896    if(rc < 0) {
1897        ALOGE("%s: failed to query capability",__func__);
1898        goto query_failed;
1899    }
1900    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1901    if (!gCamCapability[cameraId]) {
1902        ALOGE("%s: out of memory", __func__);
1903        goto query_failed;
1904    }
1905    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1906                                        sizeof(cam_capability_t));
1907    rc = 0;
1908
1909query_failed:
1910    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1911                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1912map_failed:
1913    capabilityHeap->deallocate();
1914allocate_failed:
1915    delete capabilityHeap;
1916heap_creation_failed:
1917    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1918    cameraHandle = NULL;
1919open_failed:
1920    return rc;
1921}
1922
1923/*===========================================================================
1924 * FUNCTION   : initParameters
1925 *
1926 * DESCRIPTION: initialize camera parameters
1927 *
1928 * PARAMETERS :
1929 *
1930 * RETURN     : int32_t type of status
1931 *              NO_ERROR  -- success
1932 *              none-zero failure code
1933 *==========================================================================*/
1934int QCamera3HardwareInterface::initParameters()
1935{
1936    int rc = 0;
1937
1938    //Allocate Set Param Buffer
1939    mParamHeap = new QCamera3HeapMemory();
1940    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1941    if(rc != OK) {
1942        rc = NO_MEMORY;
1943        ALOGE("Failed to allocate SETPARM Heap memory");
1944        delete mParamHeap;
1945        mParamHeap = NULL;
1946        return rc;
1947    }
1948
1949    //Map memory for parameters buffer
1950    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1951            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1952            mParamHeap->getFd(0),
1953            sizeof(parm_buffer_t));
1954    if(rc < 0) {
1955        ALOGE("%s:failed to map SETPARM buffer",__func__);
1956        rc = FAILED_TRANSACTION;
1957        mParamHeap->deallocate();
1958        delete mParamHeap;
1959        mParamHeap = NULL;
1960        return rc;
1961    }
1962
1963    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1964    return rc;
1965}
1966
1967/*===========================================================================
1968 * FUNCTION   : deinitParameters
1969 *
1970 * DESCRIPTION: de-initialize camera parameters
1971 *
1972 * PARAMETERS :
1973 *
1974 * RETURN     : NONE
1975 *==========================================================================*/
1976void QCamera3HardwareInterface::deinitParameters()
1977{
1978    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1979            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1980
1981    mParamHeap->deallocate();
1982    delete mParamHeap;
1983    mParamHeap = NULL;
1984
1985    mParameters = NULL;
1986}
1987
1988/*===========================================================================
1989 * FUNCTION   : calcMaxJpegSize
1990 *
1991 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1992 *
1993 * PARAMETERS :
1994 *
1995 * RETURN     : max_jpeg_size
1996 *==========================================================================*/
1997int QCamera3HardwareInterface::calcMaxJpegSize()
1998{
1999    int32_t max_jpeg_size = 0;
2000    int temp_width, temp_height;
2001    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2002        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2003        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2004        if (temp_width * temp_height > max_jpeg_size ) {
2005            max_jpeg_size = temp_width * temp_height;
2006        }
2007    }
2008    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2009    return max_jpeg_size;
2010}
2011
2012/*===========================================================================
2013 * FUNCTION   : initStaticMetadata
2014 *
2015 * DESCRIPTION: initialize the static metadata
2016 *
2017 * PARAMETERS :
2018 *   @cameraId  : camera Id
2019 *
2020 * RETURN     : int32_t type of status
2021 *              0  -- success
2022 *              non-zero failure code
2023 *==========================================================================*/
2024int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2025{
2026    int rc = 0;
2027    CameraMetadata staticInfo;
2028
2029    /* android.info: hardware level */
2030    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2031    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2032        &supportedHardwareLevel, 1);
2033
2034    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2035    /*HAL 3 only*/
2036    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2037                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2038
2039    /*hard coded for now but this should come from sensor*/
2040    float min_focus_distance;
2041    if(facingBack){
2042        min_focus_distance = 10;
2043    } else {
2044        min_focus_distance = 0;
2045    }
2046    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2047                    &min_focus_distance, 1);
2048
2049    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2050                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2051
2052    /*should be using focal lengths but sensor doesn't provide that info now*/
2053    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2054                      &gCamCapability[cameraId]->focal_length,
2055                      1);
2056
2057    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2058                      gCamCapability[cameraId]->apertures,
2059                      gCamCapability[cameraId]->apertures_count);
2060
2061    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2062                gCamCapability[cameraId]->filter_densities,
2063                gCamCapability[cameraId]->filter_densities_count);
2064
2065
2066    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2067                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2068                      gCamCapability[cameraId]->optical_stab_modes_count);
2069
2070    staticInfo.update(ANDROID_LENS_POSITION,
2071                      gCamCapability[cameraId]->lens_position,
2072                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2073
2074    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2075                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2076    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2077                      lens_shading_map_size,
2078                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2079
2080    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2081                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2082    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2083            geo_correction_map_size,
2084            sizeof(geo_correction_map_size)/sizeof(int32_t));
2085
2086    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2087                       gCamCapability[cameraId]->geo_correction_map,
2088                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2089
2090    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2091            gCamCapability[cameraId]->sensor_physical_size, 2);
2092
2093    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2094            gCamCapability[cameraId]->exposure_time_range, 2);
2095
2096    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2097            &gCamCapability[cameraId]->max_frame_duration, 1);
2098
2099
2100    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2101                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2102
2103    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2104                                               gCamCapability[cameraId]->pixel_array_size.height};
2105    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2106                      pixel_array_size, 2);
2107
2108    int32_t active_array_size[] = {0, 0,
2109                                                gCamCapability[cameraId]->active_array_size.width,
2110                                                gCamCapability[cameraId]->active_array_size.height};
2111    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2112                      active_array_size, 4);
2113
2114    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2115            &gCamCapability[cameraId]->white_level, 1);
2116
2117    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2118            gCamCapability[cameraId]->black_level_pattern, 4);
2119
2120    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2121                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2122
2123    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2124                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2125
2126    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2127                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2128
2129    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2130                      &gCamCapability[cameraId]->histogram_size, 1);
2131
2132    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2133            &gCamCapability[cameraId]->max_histogram_count, 1);
2134
2135    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2136                                                gCamCapability[cameraId]->sharpness_map_size.height};
2137
2138    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2139            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2140
2141    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2142            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2143
2144
2145    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2146                      &gCamCapability[cameraId]->raw_min_duration,
2147                       1);
2148
2149    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2150                                                HAL_PIXEL_FORMAT_BLOB};
2151    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2152    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2153                      scalar_formats,
2154                      scalar_formats_count);
2155
2156    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2157    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2158              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2159              available_processed_sizes);
2160    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2161                available_processed_sizes,
2162                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2163
2164    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2165                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2166                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2167
2168    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2169    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2170                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2171                 available_fps_ranges);
2172    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2173            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2174
2175    camera_metadata_rational exposureCompensationStep = {
2176            gCamCapability[cameraId]->exp_compensation_step.numerator,
2177            gCamCapability[cameraId]->exp_compensation_step.denominator};
2178    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2179                      &exposureCompensationStep, 1);
2180
2181    /*TO DO*/
2182    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2183    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2184                      availableVstabModes, sizeof(availableVstabModes));
2185
2186    /*HAL 1 and HAL 3 common*/
2187    float maxZoom = 4;
2188    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2189            &maxZoom, 1);
2190
2191    int32_t max3aRegions = 1;
2192    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2193            &max3aRegions, 1);
2194
2195    uint8_t availableFaceDetectModes[] = {
2196            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2197            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2198    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2199                      availableFaceDetectModes,
2200                      sizeof(availableFaceDetectModes));
2201
2202    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2203                                       gCamCapability[cameraId]->raw_dim.height};
2204    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2205                      raw_size,
2206                      sizeof(raw_size)/sizeof(uint32_t));
2207
2208    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2209                                                        gCamCapability[cameraId]->exposure_compensation_max};
2210    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2211            exposureCompensationRange,
2212            sizeof(exposureCompensationRange)/sizeof(int32_t));
2213
2214    uint8_t lensFacing = (facingBack) ?
2215            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2216    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2217
2218    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2219                available_processed_sizes,
2220                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2221
2222    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2223                      available_thumbnail_sizes,
2224                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2225
2226    int32_t max_jpeg_size = 0;
2227    int temp_width, temp_height;
2228    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2229        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2230        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2231        if (temp_width * temp_height > max_jpeg_size ) {
2232            max_jpeg_size = temp_width * temp_height;
2233        }
2234    }
2235    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2236    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2237                      &max_jpeg_size, 1);
2238
2239    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2240    int32_t size = 0;
2241    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2242        int val = lookupFwkName(EFFECT_MODES_MAP,
2243                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2244                                   gCamCapability[cameraId]->supported_effects[i]);
2245        if (val != NAME_NOT_FOUND) {
2246            avail_effects[size] = (uint8_t)val;
2247            size++;
2248        }
2249    }
2250    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2251                      avail_effects,
2252                      size);
2253
2254    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2255    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2256    int32_t supported_scene_modes_cnt = 0;
2257    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2258        int val = lookupFwkName(SCENE_MODES_MAP,
2259                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2260                                gCamCapability[cameraId]->supported_scene_modes[i]);
2261        if (val != NAME_NOT_FOUND) {
2262            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2263            supported_indexes[supported_scene_modes_cnt] = i;
2264            supported_scene_modes_cnt++;
2265        }
2266    }
2267
2268    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2269                      avail_scene_modes,
2270                      supported_scene_modes_cnt);
2271
2272    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2273    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2274                      supported_scene_modes_cnt,
2275                      scene_mode_overrides,
2276                      supported_indexes,
2277                      cameraId);
2278    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2279                      scene_mode_overrides,
2280                      supported_scene_modes_cnt*3);
2281
2282    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2283    size = 0;
2284    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2285        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2286                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2287                                 gCamCapability[cameraId]->supported_antibandings[i]);
2288        if (val != NAME_NOT_FOUND) {
2289            avail_antibanding_modes[size] = (uint8_t)val;
2290            size++;
2291        }
2292
2293    }
2294    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2295                      avail_antibanding_modes,
2296                      size);
2297
2298    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2299    size = 0;
2300    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2301        int val = lookupFwkName(FOCUS_MODES_MAP,
2302                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2303                                gCamCapability[cameraId]->supported_focus_modes[i]);
2304        if (val != NAME_NOT_FOUND) {
2305            avail_af_modes[size] = (uint8_t)val;
2306            size++;
2307        }
2308    }
2309    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2310                      avail_af_modes,
2311                      size);
2312
2313    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2314    size = 0;
2315    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2316        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2317                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2318                                    gCamCapability[cameraId]->supported_white_balances[i]);
2319        if (val != NAME_NOT_FOUND) {
2320            avail_awb_modes[size] = (uint8_t)val;
2321            size++;
2322        }
2323    }
2324    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2325                      avail_awb_modes,
2326                      size);
2327
2328    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2329    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2330      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2331
2332    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2333            available_flash_levels,
2334            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2335
2336
2337    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2338    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2339            &flashAvailable, 1);
2340
2341    uint8_t avail_ae_modes[5];
2342    size = 0;
2343    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2344        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2345        size++;
2346    }
2347    if (flashAvailable) {
2348        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2349        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2350        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2351    }
2352    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2353                      avail_ae_modes,
2354                      size);
2355
2356    int32_t sensitivity_range[2];
2357    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2358    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2359    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2360                      sensitivity_range,
2361                      sizeof(sensitivity_range) / sizeof(int32_t));
2362
2363    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2364                      &gCamCapability[cameraId]->max_analog_sensitivity,
2365                      1);
2366
2367    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2368                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2369                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2370
2371    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2372    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2373                      &sensor_orientation,
2374                      1);
2375
2376    int32_t max_output_streams[3] = {1, 3, 1};
2377    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2378                      max_output_streams,
2379                      3);
2380
2381    gStaticMetadata[cameraId] = staticInfo.release();
2382    return rc;
2383}
2384
2385/*===========================================================================
2386 * FUNCTION   : makeTable
2387 *
2388 * DESCRIPTION: make a table of sizes
2389 *
2390 * PARAMETERS :
2391 *
2392 *
2393 *==========================================================================*/
2394void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2395                                          int32_t* sizeTable)
2396{
2397    int j = 0;
2398    for (int i = 0; i < size; i++) {
2399        sizeTable[j] = dimTable[i].width;
2400        sizeTable[j+1] = dimTable[i].height;
2401        j+=2;
2402    }
2403}
2404
2405/*===========================================================================
2406 * FUNCTION   : makeFPSTable
2407 *
2408 * DESCRIPTION: make a table of fps ranges
2409 *
2410 * PARAMETERS :
2411 *
2412 *==========================================================================*/
2413void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2414                                          int32_t* fpsRangesTable)
2415{
2416    int j = 0;
2417    for (int i = 0; i < size; i++) {
2418        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2419        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2420        j+=2;
2421    }
2422}
2423
2424/*===========================================================================
2425 * FUNCTION   : makeOverridesList
2426 *
2427 * DESCRIPTION: make a list of scene mode overrides
2428 *
2429 * PARAMETERS :
2430 *
2431 *
2432 *==========================================================================*/
2433void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2434                                                  uint8_t size, uint8_t* overridesList,
2435                                                  uint8_t* supported_indexes,
2436                                                  int camera_id)
2437{
2438    /*daemon will give a list of overrides for all scene modes.
2439      However we should send the fwk only the overrides for the scene modes
2440      supported by the framework*/
2441    int j = 0, index = 0, supt = 0;
2442    uint8_t focus_override;
2443    for (int i = 0; i < size; i++) {
2444        supt = 0;
2445        index = supported_indexes[i];
2446        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2447        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2448                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2449                                                    overridesTable[index].awb_mode);
2450        focus_override = (uint8_t)overridesTable[index].af_mode;
2451        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2452           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2453              supt = 1;
2454              break;
2455           }
2456        }
2457        if (supt) {
2458           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2459                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2460                                              focus_override);
2461        } else {
2462           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2463        }
2464        j+=3;
2465    }
2466}
2467
2468/*===========================================================================
2469 * FUNCTION   : getPreviewHalPixelFormat
2470 *
2471 * DESCRIPTION: convert the format to type recognized by framework
2472 *
2473 * PARAMETERS : format : the format from backend
2474 *
2475 ** RETURN    : format recognized by framework
2476 *
2477 *==========================================================================*/
2478int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2479{
2480    int32_t halPixelFormat;
2481
2482    switch (format) {
2483    case CAM_FORMAT_YUV_420_NV12:
2484        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2485        break;
2486    case CAM_FORMAT_YUV_420_NV21:
2487        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2488        break;
2489    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2490        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2491        break;
2492    case CAM_FORMAT_YUV_420_YV12:
2493        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2494        break;
2495    case CAM_FORMAT_YUV_422_NV16:
2496    case CAM_FORMAT_YUV_422_NV61:
2497    default:
2498        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2499        break;
2500    }
2501    return halPixelFormat;
2502}
2503
2504/*===========================================================================
2505 * FUNCTION   : getSensorSensitivity
2506 *
2507 * DESCRIPTION: convert iso_mode to an integer value
2508 *
2509 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2510 *
2511 ** RETURN    : sensitivity supported by sensor
2512 *
2513 *==========================================================================*/
2514int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2515{
2516    int32_t sensitivity;
2517
2518    switch (iso_mode) {
2519    case CAM_ISO_MODE_100:
2520        sensitivity = 100;
2521        break;
2522    case CAM_ISO_MODE_200:
2523        sensitivity = 200;
2524        break;
2525    case CAM_ISO_MODE_400:
2526        sensitivity = 400;
2527        break;
2528    case CAM_ISO_MODE_800:
2529        sensitivity = 800;
2530        break;
2531    case CAM_ISO_MODE_1600:
2532        sensitivity = 1600;
2533        break;
2534    default:
2535        sensitivity = -1;
2536        break;
2537    }
2538    return sensitivity;
2539}
2540
2541
2542/*===========================================================================
2543 * FUNCTION   : AddSetParmEntryToBatch
2544 *
2545 * DESCRIPTION: add set parameter entry into batch
2546 *
2547 * PARAMETERS :
2548 *   @p_table     : ptr to parameter buffer
2549 *   @paramType   : parameter type
2550 *   @paramLength : length of parameter value
2551 *   @paramValue  : ptr to parameter value
2552 *
2553 * RETURN     : int32_t type of status
2554 *              NO_ERROR  -- success
2555 *              none-zero failure code
2556 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    // The batch keeps its flagged entries in a sorted singly-linked list
    // encoded as "next" indices inside p_table (via the GET/SET_*_PARAM_ID
    // macros). An entry's slot index equals its parameter type.
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
    } else if (position < current){
        // New smallest id: this entry becomes the new list head.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            // Splice the new id between 'current' and its successor.
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // NOTE(review): this size check runs after the entry was already linked
    // above, so an oversized value leaves a flagged entry with no payload —
    // confirm callers never pass paramLength > sizeof(parm_type_t).
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2598
2599/*===========================================================================
2600 * FUNCTION   : lookupFwkName
2601 *
2602 * DESCRIPTION: In case the enum is not same in fwk and backend
2603 *              make sure the parameter is correctly propogated
2604 *
2605 * PARAMETERS  :
2606 *   @arr      : map between the two enums
2607 *   @len      : len of the map
2608 *   @hal_name : name of the hal_parm to map
2609 *
2610 * RETURN     : int type of status
2611 *              fwk_name  -- success
2612 *              none-zero failure code
2613 *==========================================================================*/
2614int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2615                                             int len, int hal_name)
2616{
2617
2618    for (int i = 0; i < len; i++) {
2619        if (arr[i].hal_name == hal_name)
2620            return arr[i].fwk_name;
2621    }
2622
2623    /* Not able to find matching framework type is not necessarily
2624     * an error case. This happens when mm-camera supports more attributes
2625     * than the frameworks do */
2626    ALOGD("%s: Cannot find matching framework type", __func__);
2627    return NAME_NOT_FOUND;
2628}
2629
2630/*===========================================================================
2631 * FUNCTION   : lookupHalName
2632 *
2633 * DESCRIPTION: In case the enum is not same in fwk and backend
2634 *              make sure the parameter is correctly propogated
2635 *
2636 * PARAMETERS  :
2637 *   @arr      : map between the two enums
2638 *   @len      : len of the map
2639 *   @fwk_name : name of the hal_parm to map
2640 *
2641 * RETURN     : int32_t type of status
2642 *              hal_name  -- success
2643 *              none-zero failure code
2644 *==========================================================================*/
2645int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2646                                             int len, int fwk_name)
2647{
2648    for (int i = 0; i < len; i++) {
2649       if (arr[i].fwk_name == fwk_name)
2650           return arr[i].hal_name;
2651    }
2652    ALOGE("%s: Cannot find matching hal type", __func__);
2653    return NAME_NOT_FOUND;
2654}
2655
2656/*===========================================================================
2657 * FUNCTION   : getCapabilities
2658 *
2659 * DESCRIPTION: query camera capabilities
2660 *
2661 * PARAMETERS :
2662 *   @cameraId  : camera Id
2663 *   @info      : camera info struct to be filled in with camera capabilities
2664 *
2665 * RETURN     : int32_t type of status
2666 *              NO_ERROR  -- success
2667 *              none-zero failure code
2668 *==========================================================================*/
2669int QCamera3HardwareInterface::getCamInfo(int cameraId,
2670                                    struct camera_info *info)
2671{
2672    int rc = 0;
2673
2674    if (NULL == gCamCapability[cameraId]) {
2675        rc = initCapabilities(cameraId);
2676        if (rc < 0) {
2677            //pthread_mutex_unlock(&g_camlock);
2678            return rc;
2679        }
2680    }
2681
2682    if (NULL == gStaticMetadata[cameraId]) {
2683        rc = initStaticMetadata(cameraId);
2684        if (rc < 0) {
2685            return rc;
2686        }
2687    }
2688
2689    switch(gCamCapability[cameraId]->position) {
2690    case CAM_POSITION_BACK:
2691        info->facing = CAMERA_FACING_BACK;
2692        break;
2693
2694    case CAM_POSITION_FRONT:
2695        info->facing = CAMERA_FACING_FRONT;
2696        break;
2697
2698    default:
2699        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2700        rc = -1;
2701        break;
2702    }
2703
2704
2705    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2706    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2707    info->static_camera_characteristics = gStaticMetadata[cameraId];
2708
2709    return rc;
2710}
2711
2712/*===========================================================================
2713 * FUNCTION   : translateMetadata
2714 *
2715 * DESCRIPTION: translate the metadata into camera_metadata_t
2716 *
2717 * PARAMETERS : type of the request
2718 *
2719 *
2720 * RETURN     : success: camera_metadata_t*
2721 *              failure: NULL
2722 *
2723 *==========================================================================*/
2724camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2725{
2726    pthread_mutex_lock(&mMutex);
2727
2728    if (mDefaultMetadata[type] != NULL) {
2729        pthread_mutex_unlock(&mMutex);
2730        return mDefaultMetadata[type];
2731    }
2732    //first time we are handling this request
2733    //fill up the metadata structure using the wrapper class
2734    CameraMetadata settings;
2735    //translate from cam_capability_t to camera_metadata_tag_t
2736    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2737    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2738
2739    /*control*/
2740
2741    uint8_t controlIntent = 0;
2742    switch (type) {
2743      case CAMERA3_TEMPLATE_PREVIEW:
2744        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2745        break;
2746      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2747        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2748        break;
2749      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2750        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2751        break;
2752      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2753        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2754        break;
2755      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2756        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2757        break;
2758      default:
2759        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2760        break;
2761    }
2762    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2763
2764    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2765            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2766
2767    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2768    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2769
2770    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2771    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2772
2773    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2774    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2775
2776    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2777    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2778
2779    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2780    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2781
2782    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2783    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2784
2785    static uint8_t focusMode;
2786    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2787        ALOGE("%s: Setting focus mode to auto", __func__);
2788        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2789    } else {
2790        ALOGE("%s: Setting focus mode to off", __func__);
2791        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2792    }
2793    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2794
2795    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2796    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2797
2798    /*flash*/
2799    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2800    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2801
2802    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2803    settings.update(ANDROID_FLASH_FIRING_POWER,
2804            &flashFiringLevel, 1);
2805
2806    /* lens */
2807    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2808    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2809
2810    if (gCamCapability[mCameraId]->filter_densities_count) {
2811        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2812        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2813                        gCamCapability[mCameraId]->filter_densities_count);
2814    }
2815
2816    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2817    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2818
2819    /* frame duration */
2820    int64_t default_frame_duration = NSEC_PER_33MSEC;
2821    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2822
2823    /* sensitivity */
2824    int32_t default_sensitivity = 100;
2825    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2826
2827    mDefaultMetadata[type] = settings.release();
2828
2829    pthread_mutex_unlock(&mMutex);
2830    return mDefaultMetadata[type];
2831}
2832
2833/*===========================================================================
2834 * FUNCTION   : setFrameParameters
2835 *
2836 * DESCRIPTION: set parameters per frame as requested in the metadata from
2837 *              framework
2838 *
2839 * PARAMETERS :
2840 *   @request   : request that needs to be serviced
2841 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2842 *
2843 * RETURN     : success: NO_ERROR
2844 *              failure:
2845 *==========================================================================*/
int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
                    uint32_t streamTypeMask)
{
    /*translate from camera_metadata_t type to parm_type_t*/
    int rc = 0;
    if (request->settings == NULL && mFirstRequest) {
        /*settings cannot be null for the first request*/
        return BAD_VALUE;
    }

    int32_t hal_version = CAM_HAL_V3;

    // Rebuild the parameter batch from scratch for every frame; an empty
    // batch is marked by first_flagged_entry == CAM_INTF_PARM_MAX.
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);
    if (rc < 0) {
        ALOGE("%s: Failed to set hal version in the parameters", __func__);
        return BAD_VALUE;
    }

    /*we need to update the frame number in the parameters*/
    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
                                sizeof(request->frame_number), &(request->frame_number));
    if (rc < 0) {
        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
        return BAD_VALUE;
    }

    /* Update stream id mask where buffers are requested */
    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
                                sizeof(streamTypeMask), &streamTypeMask);
    if (rc < 0) {
        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
        return BAD_VALUE;
    }

    // A repeating request after the first may carry NULL settings; in that
    // case only the hal version / frame number / stream mask are sent.
    if(request->settings != NULL){
        rc = translateMetadataToParameters(request);
    }
    /*set the parameters to backend*/
    // NOTE(review): set_parms is invoked even if translateMetadataToParameters
    // failed, and its own return value is ignored — confirm this best-effort
    // behavior is intended.
    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    return rc;
}
2890
2891/*===========================================================================
2892 * FUNCTION   : translateMetadataToParameters
2893 *
2894 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2895 *
2896 *
2897 * PARAMETERS :
2898 *   @request  : request sent from framework
2899 *
2900 *
2901 * RETURN     : success: NO_ERROR
2902 *              failure:
2903 *==========================================================================*/
2904int QCamera3HardwareInterface::translateMetadataToParameters
2905                                  (const camera3_capture_request_t *request)
2906{
2907    int rc = 0;
2908    CameraMetadata frame_settings;
2909    frame_settings = request->settings;
2910
2911    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2912        int32_t antibandingMode =
2913            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2914        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2915                sizeof(antibandingMode), &antibandingMode);
2916    }
2917
2918    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2919        int32_t expCompensation = frame_settings.find(
2920            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2921        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2922            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2923        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2924            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2925        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2926          sizeof(expCompensation), &expCompensation);
2927    }
2928
2929    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2930        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2931        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2932                sizeof(aeLock), &aeLock);
2933    }
2934    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2935        cam_fps_range_t fps_range;
2936        fps_range.min_fps =
2937            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2938        fps_range.max_fps =
2939            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2940        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2941                sizeof(fps_range), &fps_range);
2942    }
2943
2944    float focalDistance = -1.0;
2945    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2946        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2947        rc = AddSetParmEntryToBatch(mParameters,
2948                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2949                sizeof(focalDistance), &focalDistance);
2950    }
2951
2952    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2953        uint8_t fwk_focusMode =
2954            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2955        uint8_t focusMode;
2956        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2957            focusMode = CAM_FOCUS_MODE_INFINITY;
2958        } else{
2959         focusMode = lookupHalName(FOCUS_MODES_MAP,
2960                                   sizeof(FOCUS_MODES_MAP),
2961                                   fwk_focusMode);
2962        }
2963        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2964                sizeof(focusMode), &focusMode);
2965    }
2966
2967    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2968        uint8_t awbLock =
2969            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2970        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2971                sizeof(awbLock), &awbLock);
2972    }
2973
2974    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2975        uint8_t fwk_whiteLevel =
2976            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2977        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2978                sizeof(WHITE_BALANCE_MODES_MAP),
2979                fwk_whiteLevel);
2980        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2981                sizeof(whiteLevel), &whiteLevel);
2982    }
2983
2984    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2985        uint8_t fwk_effectMode =
2986            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2987        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2988                sizeof(EFFECT_MODES_MAP),
2989                fwk_effectMode);
2990        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2991                sizeof(effectMode), &effectMode);
2992    }
2993
2994    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2995        uint8_t fwk_aeMode =
2996            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2997        uint8_t aeMode;
2998        int32_t redeye;
2999
3000        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3001            aeMode = CAM_AE_MODE_OFF;
3002        } else {
3003            aeMode = CAM_AE_MODE_ON;
3004        }
3005        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3006            redeye = 1;
3007        } else {
3008            redeye = 0;
3009        }
3010
3011        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3012                                          sizeof(AE_FLASH_MODE_MAP),
3013                                          fwk_aeMode);
3014        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3015                sizeof(aeMode), &aeMode);
3016        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3017                sizeof(flashMode), &flashMode);
3018        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3019                sizeof(redeye), &redeye);
3020    }
3021
3022    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3023        uint8_t colorCorrectMode =
3024            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3025        rc =
3026            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3027                    sizeof(colorCorrectMode), &colorCorrectMode);
3028    }
3029
3030    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3031        cam_color_correct_gains_t colorCorrectGains;
3032        for (int i = 0; i < 4; i++) {
3033            colorCorrectGains.gains[i] =
3034                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3035        }
3036        rc =
3037            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3038                    sizeof(colorCorrectGains), &colorCorrectGains);
3039    }
3040
3041    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3042        cam_color_correct_matrix_t colorCorrectTransform;
3043        cam_rational_type_t transform_elem;
3044        int num = 0;
3045        for (int i = 0; i < 3; i++) {
3046           for (int j = 0; j < 3; j++) {
3047              transform_elem.numerator =
3048                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3049              transform_elem.denominator =
3050                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3051              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3052              num++;
3053           }
3054        }
3055        rc =
3056            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3057                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3058    }
3059
3060    cam_trigger_t aecTrigger;
3061    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3062    aecTrigger.trigger_id = -1;
3063    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3064        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3065        aecTrigger.trigger =
3066            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3067        aecTrigger.trigger_id =
3068            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3069    }
3070    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3071                                sizeof(aecTrigger), &aecTrigger);
3072
3073    /*af_trigger must come with a trigger id*/
3074    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3075        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3076        cam_trigger_t af_trigger;
3077        af_trigger.trigger =
3078            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3079        af_trigger.trigger_id =
3080            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3081        rc = AddSetParmEntryToBatch(mParameters,
3082                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3083    }
3084
3085    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3086        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3087        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3088                sizeof(metaMode), &metaMode);
3089        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3090           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3091           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3092                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3093                                             fwk_sceneMode);
3094           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3095                sizeof(sceneMode), &sceneMode);
3096        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3097           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3098           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3099                sizeof(sceneMode), &sceneMode);
3100        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3101           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3102           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3103                sizeof(sceneMode), &sceneMode);
3104        }
3105    }
3106
3107    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3108        int32_t demosaic =
3109            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3110        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3111                sizeof(demosaic), &demosaic);
3112    }
3113
3114    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3115        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3116        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3117                sizeof(edgeMode), &edgeMode);
3118    }
3119
3120    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3121        int32_t edgeStrength =
3122            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3123        rc = AddSetParmEntryToBatch(mParameters,
3124                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
3125    }
3126
3127    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3128        int32_t respectFlashMode = 1;
3129        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3130            uint8_t fwk_aeMode =
3131                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3132            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3133                respectFlashMode = 0;
3134                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3135                    __func__);
3136            }
3137        }
3138        if (respectFlashMode) {
3139            uint8_t flashMode =
3140                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3141            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3142                                          sizeof(FLASH_MODES_MAP),
3143                                          flashMode);
3144            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3145            // To check: CAM_INTF_META_FLASH_MODE usage
3146            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3147                          sizeof(flashMode), &flashMode);
3148        }
3149    }
3150
3151    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3152        uint8_t flashPower =
3153            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3154        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3155                sizeof(flashPower), &flashPower);
3156    }
3157
3158    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3159        int64_t flashFiringTime =
3160            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3161        rc = AddSetParmEntryToBatch(mParameters,
3162                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3163    }
3164
3165    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3166        uint8_t geometricMode =
3167            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3168        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3169                sizeof(geometricMode), &geometricMode);
3170    }
3171
3172    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3173        uint8_t geometricStrength =
3174            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3175        rc = AddSetParmEntryToBatch(mParameters,
3176                CAM_INTF_META_GEOMETRIC_STRENGTH,
3177                sizeof(geometricStrength), &geometricStrength);
3178    }
3179
3180    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3181        uint8_t hotPixelMode =
3182            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3183        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3184                sizeof(hotPixelMode), &hotPixelMode);
3185    }
3186
3187    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3188        float lensAperture =
3189            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3190        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3191                sizeof(lensAperture), &lensAperture);
3192    }
3193
3194    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3195        float filterDensity =
3196            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3197        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3198                sizeof(filterDensity), &filterDensity);
3199    }
3200
3201    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3202        float focalLength =
3203            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3204        rc = AddSetParmEntryToBatch(mParameters,
3205                CAM_INTF_META_LENS_FOCAL_LENGTH,
3206                sizeof(focalLength), &focalLength);
3207    }
3208
3209    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3210        uint8_t optStabMode =
3211            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3212        rc = AddSetParmEntryToBatch(mParameters,
3213                CAM_INTF_META_LENS_OPT_STAB_MODE,
3214                sizeof(optStabMode), &optStabMode);
3215    }
3216
3217    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3218        uint8_t noiseRedMode =
3219            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3220        rc = AddSetParmEntryToBatch(mParameters,
3221                CAM_INTF_META_NOISE_REDUCTION_MODE,
3222                sizeof(noiseRedMode), &noiseRedMode);
3223    }
3224
3225    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3226        uint8_t noiseRedStrength =
3227            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3228        rc = AddSetParmEntryToBatch(mParameters,
3229                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3230                sizeof(noiseRedStrength), &noiseRedStrength);
3231    }
3232
3233    cam_crop_region_t scalerCropRegion;
3234    bool scalerCropSet = false;
3235    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3236        scalerCropRegion.left =
3237            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3238        scalerCropRegion.top =
3239            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3240        scalerCropRegion.width =
3241            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3242        scalerCropRegion.height =
3243            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3244        rc = AddSetParmEntryToBatch(mParameters,
3245                CAM_INTF_META_SCALER_CROP_REGION,
3246                sizeof(scalerCropRegion), &scalerCropRegion);
3247        scalerCropSet = true;
3248    }
3249
3250    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3251        int64_t sensorExpTime =
3252            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3253        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3254        rc = AddSetParmEntryToBatch(mParameters,
3255                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3256                sizeof(sensorExpTime), &sensorExpTime);
3257    }
3258
3259    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3260        int64_t sensorFrameDuration =
3261            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3262        int64_t minFrameDuration = getMinFrameDuration(request);
3263        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3264        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3265            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3266        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3267        rc = AddSetParmEntryToBatch(mParameters,
3268                CAM_INTF_META_SENSOR_FRAME_DURATION,
3269                sizeof(sensorFrameDuration), &sensorFrameDuration);
3270    }
3271
3272    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3273        int32_t sensorSensitivity =
3274            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3275        if (sensorSensitivity <
3276                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3277            sensorSensitivity =
3278                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3279        if (sensorSensitivity >
3280                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3281            sensorSensitivity =
3282                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3283        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3284        rc = AddSetParmEntryToBatch(mParameters,
3285                CAM_INTF_META_SENSOR_SENSITIVITY,
3286                sizeof(sensorSensitivity), &sensorSensitivity);
3287    }
3288
3289    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3290        int32_t shadingMode =
3291            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3292        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3293                sizeof(shadingMode), &shadingMode);
3294    }
3295
3296    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3297        uint8_t shadingStrength =
3298            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3299        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3300                sizeof(shadingStrength), &shadingStrength);
3301    }
3302
3303    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3304        uint8_t fwk_facedetectMode =
3305            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3306        uint8_t facedetectMode =
3307            lookupHalName(FACEDETECT_MODES_MAP,
3308                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3309        rc = AddSetParmEntryToBatch(mParameters,
3310                CAM_INTF_META_STATS_FACEDETECT_MODE,
3311                sizeof(facedetectMode), &facedetectMode);
3312    }
3313
3314    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3315        uint8_t histogramMode =
3316            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3317        rc = AddSetParmEntryToBatch(mParameters,
3318                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3319                sizeof(histogramMode), &histogramMode);
3320    }
3321
3322    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3323        uint8_t sharpnessMapMode =
3324            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3325        rc = AddSetParmEntryToBatch(mParameters,
3326                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3327                sizeof(sharpnessMapMode), &sharpnessMapMode);
3328    }
3329
3330    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3331        uint8_t tonemapMode =
3332            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3333        rc = AddSetParmEntryToBatch(mParameters,
3334                CAM_INTF_META_TONEMAP_MODE,
3335                sizeof(tonemapMode), &tonemapMode);
3336    }
3337    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3338    /*All tonemap channels will have the same number of points*/
3339    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3340        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3341        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3342        cam_rgb_tonemap_curves tonemapCurves;
3343        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3344
3345        /* ch0 = G*/
3346        int point = 0;
3347        cam_tonemap_curve_t tonemapCurveGreen;
3348        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3349            for (int j = 0; j < 2; j++) {
3350               tonemapCurveGreen.tonemap_points[i][j] =
3351                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3352               point++;
3353            }
3354        }
3355        tonemapCurves.curves[0] = tonemapCurveGreen;
3356
3357        /* ch 1 = B */
3358        point = 0;
3359        cam_tonemap_curve_t tonemapCurveBlue;
3360        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3361            for (int j = 0; j < 2; j++) {
3362               tonemapCurveBlue.tonemap_points[i][j] =
3363                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3364               point++;
3365            }
3366        }
3367        tonemapCurves.curves[1] = tonemapCurveBlue;
3368
3369        /* ch 2 = R */
3370        point = 0;
3371        cam_tonemap_curve_t tonemapCurveRed;
3372        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3373            for (int j = 0; j < 2; j++) {
3374               tonemapCurveRed.tonemap_points[i][j] =
3375                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3376               point++;
3377            }
3378        }
3379        tonemapCurves.curves[2] = tonemapCurveRed;
3380
3381        rc = AddSetParmEntryToBatch(mParameters,
3382                CAM_INTF_META_TONEMAP_CURVES,
3383                sizeof(tonemapCurves), &tonemapCurves);
3384    }
3385
3386    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3387        uint8_t captureIntent =
3388            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3389        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3390                sizeof(captureIntent), &captureIntent);
3391    }
3392
3393    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3394        uint8_t blackLevelLock =
3395            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3396        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3397                sizeof(blackLevelLock), &blackLevelLock);
3398    }
3399
3400    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3401        uint8_t lensShadingMapMode =
3402            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3403        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3404                sizeof(lensShadingMapMode), &lensShadingMapMode);
3405    }
3406
3407    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3408        cam_area_t roi;
3409        bool reset = true;
3410        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3411        if (scalerCropSet) {
3412            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3413        }
3414        if (reset) {
3415            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3416                    sizeof(roi), &roi);
3417        }
3418    }
3419
3420    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3421        cam_area_t roi;
3422        bool reset = true;
3423        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3424        if (scalerCropSet) {
3425            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3426        }
3427        if (reset) {
3428            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3429                    sizeof(roi), &roi);
3430        }
3431    }
3432
3433    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3434        cam_area_t roi;
3435        bool reset = true;
3436        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3437        if (scalerCropSet) {
3438            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3439        }
3440        if (reset) {
3441            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3442                    sizeof(roi), &roi);
3443        }
3444    }
3445    return rc;
3446}
3447
3448/*===========================================================================
3449 * FUNCTION   : getJpegSettings
3450 *
3451 * DESCRIPTION: save the jpeg settings in the HAL
3452 *
3453 *
3454 * PARAMETERS :
3455 *   @settings  : frame settings information from framework
3456 *
3457 *
3458 * RETURN     : success: NO_ERROR
3459 *              failure:
3460 *==========================================================================*/
3461int QCamera3HardwareInterface::getJpegSettings
3462                                  (const camera_metadata_t *settings)
3463{
3464    if (mJpegSettings) {
3465        if (mJpegSettings->gps_timestamp) {
3466            free(mJpegSettings->gps_timestamp);
3467            mJpegSettings->gps_timestamp = NULL;
3468        }
3469        if (mJpegSettings->gps_coordinates) {
3470            for (int i = 0; i < 3; i++) {
3471                free(mJpegSettings->gps_coordinates[i]);
3472                mJpegSettings->gps_coordinates[i] = NULL;
3473            }
3474        }
3475        free(mJpegSettings);
3476        mJpegSettings = NULL;
3477    }
3478    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3479    CameraMetadata jpeg_settings;
3480    jpeg_settings = settings;
3481
3482    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3483        mJpegSettings->jpeg_orientation =
3484            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3485    } else {
3486        mJpegSettings->jpeg_orientation = 0;
3487    }
3488    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3489        mJpegSettings->jpeg_quality =
3490            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3491    } else {
3492        mJpegSettings->jpeg_quality = 85;
3493    }
3494    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3495        mJpegSettings->thumbnail_size.width =
3496            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3497        mJpegSettings->thumbnail_size.height =
3498            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3499    } else {
3500        mJpegSettings->thumbnail_size.width = 0;
3501        mJpegSettings->thumbnail_size.height = 0;
3502    }
3503    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3504        for (int i = 0; i < 3; i++) {
3505            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3506            *(mJpegSettings->gps_coordinates[i]) =
3507                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3508        }
3509    } else{
3510       for (int i = 0; i < 3; i++) {
3511            mJpegSettings->gps_coordinates[i] = NULL;
3512        }
3513    }
3514
3515    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3516        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3517        *(mJpegSettings->gps_timestamp) =
3518            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3519    } else {
3520        mJpegSettings->gps_timestamp = NULL;
3521    }
3522
3523    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3524        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3525        for (int i = 0; i < len; i++) {
3526            mJpegSettings->gps_processing_method[i] =
3527                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3528        }
3529        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3530            mJpegSettings->gps_processing_method[len] = '\0';
3531        }
3532    } else {
3533        mJpegSettings->gps_processing_method[0] = '\0';
3534    }
3535
3536    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3537        mJpegSettings->sensor_sensitivity =
3538            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3539    } else {
3540        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3541    }
3542
3543    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3544
3545    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3546        mJpegSettings->lens_focal_length =
3547            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3548    }
3549    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3550        mJpegSettings->exposure_compensation =
3551            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3552    }
3553    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3554    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3555    mJpegSettings->is_jpeg_format = true;
3556    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3557    return 0;
3558}
3559
3560/*===========================================================================
3561 * FUNCTION   : captureResultCb
3562 *
3563 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3564 *
3565 * PARAMETERS :
3566 *   @frame  : frame information from mm-camera-interface
3567 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3568 *   @userdata: userdata
3569 *
3570 * RETURN     : NONE
3571 *==========================================================================*/
3572void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3573                camera3_stream_buffer_t *buffer,
3574                uint32_t frame_number, void *userdata)
3575{
3576    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3577    if (hw == NULL) {
3578        ALOGE("%s: Invalid hw %p", __func__, hw);
3579        return;
3580    }
3581
3582    hw->captureResultCb(metadata, buffer, frame_number);
3583    return;
3584}
3585
3586
3587/*===========================================================================
3588 * FUNCTION   : initialize
3589 *
3590 * DESCRIPTION: Pass framework callback pointers to HAL
3591 *
3592 * PARAMETERS :
3593 *
3594 *
3595 * RETURN     : Success : 0
3596 *              Failure: -ENODEV
3597 *==========================================================================*/
3598
3599int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3600                                  const camera3_callback_ops_t *callback_ops)
3601{
3602    ALOGV("%s: E", __func__);
3603    QCamera3HardwareInterface *hw =
3604        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3605    if (!hw) {
3606        ALOGE("%s: NULL camera device", __func__);
3607        return -ENODEV;
3608    }
3609
3610    int rc = hw->initialize(callback_ops);
3611    ALOGV("%s: X", __func__);
3612    return rc;
3613}
3614
3615/*===========================================================================
3616 * FUNCTION   : configure_streams
3617 *
3618 * DESCRIPTION:
3619 *
3620 * PARAMETERS :
3621 *
3622 *
3623 * RETURN     : Success: 0
3624 *              Failure: -EINVAL (if stream configuration is invalid)
3625 *                       -ENODEV (fatal error)
3626 *==========================================================================*/
3627
3628int QCamera3HardwareInterface::configure_streams(
3629        const struct camera3_device *device,
3630        camera3_stream_configuration_t *stream_list)
3631{
3632    ALOGV("%s: E", __func__);
3633    QCamera3HardwareInterface *hw =
3634        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3635    if (!hw) {
3636        ALOGE("%s: NULL camera device", __func__);
3637        return -ENODEV;
3638    }
3639    int rc = hw->configureStreams(stream_list);
3640    ALOGV("%s: X", __func__);
3641    return rc;
3642}
3643
3644/*===========================================================================
3645 * FUNCTION   : register_stream_buffers
3646 *
3647 * DESCRIPTION: Register stream buffers with the device
3648 *
3649 * PARAMETERS :
3650 *
3651 * RETURN     :
3652 *==========================================================================*/
3653int QCamera3HardwareInterface::register_stream_buffers(
3654        const struct camera3_device *device,
3655        const camera3_stream_buffer_set_t *buffer_set)
3656{
3657    ALOGV("%s: E", __func__);
3658    QCamera3HardwareInterface *hw =
3659        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3660    if (!hw) {
3661        ALOGE("%s: NULL camera device", __func__);
3662        return -ENODEV;
3663    }
3664    int rc = hw->registerStreamBuffers(buffer_set);
3665    ALOGV("%s: X", __func__);
3666    return rc;
3667}
3668
3669/*===========================================================================
3670 * FUNCTION   : construct_default_request_settings
3671 *
3672 * DESCRIPTION: Configure a settings buffer to meet the required use case
3673 *
3674 * PARAMETERS :
3675 *
3676 *
3677 * RETURN     : Success: Return valid metadata
3678 *              Failure: Return NULL
3679 *==========================================================================*/
3680const camera_metadata_t* QCamera3HardwareInterface::
3681    construct_default_request_settings(const struct camera3_device *device,
3682                                        int type)
3683{
3684
3685    ALOGV("%s: E", __func__);
3686    camera_metadata_t* fwk_metadata = NULL;
3687    QCamera3HardwareInterface *hw =
3688        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3689    if (!hw) {
3690        ALOGE("%s: NULL camera device", __func__);
3691        return NULL;
3692    }
3693
3694    fwk_metadata = hw->translateCapabilityToMetadata(type);
3695
3696    ALOGV("%s: X", __func__);
3697    return fwk_metadata;
3698}
3699
3700/*===========================================================================
3701 * FUNCTION   : process_capture_request
3702 *
3703 * DESCRIPTION:
3704 *
3705 * PARAMETERS :
3706 *
3707 *
3708 * RETURN     :
3709 *==========================================================================*/
3710int QCamera3HardwareInterface::process_capture_request(
3711                    const struct camera3_device *device,
3712                    camera3_capture_request_t *request)
3713{
3714    ALOGV("%s: E", __func__);
3715    QCamera3HardwareInterface *hw =
3716        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3717    if (!hw) {
3718        ALOGE("%s: NULL camera device", __func__);
3719        return -EINVAL;
3720    }
3721
3722    int rc = hw->processCaptureRequest(request);
3723    ALOGV("%s: X", __func__);
3724    return rc;
3725}
3726
3727/*===========================================================================
3728 * FUNCTION   : get_metadata_vendor_tag_ops
3729 *
3730 * DESCRIPTION:
3731 *
3732 * PARAMETERS :
3733 *
3734 *
3735 * RETURN     :
3736 *==========================================================================*/
3737
3738void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3739                const struct camera3_device *device,
3740                vendor_tag_query_ops_t* ops)
3741{
3742    ALOGV("%s: E", __func__);
3743    QCamera3HardwareInterface *hw =
3744        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3745    if (!hw) {
3746        ALOGE("%s: NULL camera device", __func__);
3747        return;
3748    }
3749
3750    hw->getMetadataVendorTagOps(ops);
3751    ALOGV("%s: X", __func__);
3752    return;
3753}
3754
3755/*===========================================================================
3756 * FUNCTION   : dump
3757 *
3758 * DESCRIPTION:
3759 *
3760 * PARAMETERS :
3761 *
3762 *
3763 * RETURN     :
3764 *==========================================================================*/
3765
3766void QCamera3HardwareInterface::dump(
3767                const struct camera3_device *device, int fd)
3768{
3769    ALOGV("%s: E", __func__);
3770    QCamera3HardwareInterface *hw =
3771        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3772    if (!hw) {
3773        ALOGE("%s: NULL camera device", __func__);
3774        return;
3775    }
3776
3777    hw->dump(fd);
3778    ALOGV("%s: X", __func__);
3779    return;
3780}
3781
3782/*===========================================================================
3783 * FUNCTION   : flush
3784 *
3785 * DESCRIPTION:
3786 *
3787 * PARAMETERS :
3788 *
3789 *
3790 * RETURN     :
3791 *==========================================================================*/
3792
3793int QCamera3HardwareInterface::flush(
3794                const struct camera3_device *device)
3795{
3796    int rc;
3797    ALOGV("%s: E", __func__);
3798    QCamera3HardwareInterface *hw =
3799        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3800    if (!hw) {
3801        ALOGE("%s: NULL camera device", __func__);
3802        return -EINVAL;
3803    }
3804
3805    rc = hw->flush();
3806    ALOGV("%s: X", __func__);
3807    return rc;
3808}
3809
3810/*===========================================================================
3811 * FUNCTION   : close_camera_device
3812 *
3813 * DESCRIPTION:
3814 *
3815 * PARAMETERS :
3816 *
3817 *
3818 * RETURN     :
3819 *==========================================================================*/
3820int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3821{
3822    ALOGV("%s: E", __func__);
3823    int ret = NO_ERROR;
3824    QCamera3HardwareInterface *hw =
3825        reinterpret_cast<QCamera3HardwareInterface *>(
3826            reinterpret_cast<camera3_device_t *>(device)->priv);
3827    if (!hw) {
3828        ALOGE("NULL camera device");
3829        return BAD_VALUE;
3830    }
3831    delete hw;
3832
3833    pthread_mutex_lock(&mCameraSessionLock);
3834    mCameraSessionActive = 0;
3835    pthread_mutex_unlock(&mCameraSessionLock);
3836    ALOGV("%s: X", __func__);
3837    return ret;
3838}
3839
3840/*===========================================================================
3841 * FUNCTION   : getWaveletDenoiseProcessPlate
3842 *
3843 * DESCRIPTION: query wavelet denoise process plate
3844 *
3845 * PARAMETERS : None
3846 *
3847 * RETURN     : WNR prcocess plate vlaue
3848 *==========================================================================*/
3849cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3850{
3851    char prop[PROPERTY_VALUE_MAX];
3852    memset(prop, 0, sizeof(prop));
3853    property_get("persist.denoise.process.plates", prop, "0");
3854    int processPlate = atoi(prop);
3855    switch(processPlate) {
3856    case 0:
3857        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3858    case 1:
3859        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3860    case 2:
3861        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3862    case 3:
3863        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3864    default:
3865        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3866    }
3867}
3868
3869/*===========================================================================
3870 * FUNCTION   : needRotationReprocess
3871 *
3872 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3873 *
3874 * PARAMETERS : none
3875 *
3876 * RETURN     : true: needed
3877 *              false: no need
3878 *==========================================================================*/
3879bool QCamera3HardwareInterface::needRotationReprocess()
3880{
3881
3882    if (!mJpegSettings->is_jpeg_format) {
3883        // RAW image, no need to reprocess
3884        return false;
3885    }
3886
3887    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3888        mJpegSettings->jpeg_orientation > 0) {
3889        // current rotation is not zero, and pp has the capability to process rotation
3890        ALOGD("%s: need do reprocess for rotation", __func__);
3891        return true;
3892    }
3893
3894    return false;
3895}
3896
3897/*===========================================================================
3898 * FUNCTION   : needReprocess
3899 *
3900 * DESCRIPTION: if reprocess in needed
3901 *
3902 * PARAMETERS : none
3903 *
3904 * RETURN     : true: needed
3905 *              false: no need
3906 *==========================================================================*/
3907bool QCamera3HardwareInterface::needReprocess()
3908{
3909    if (!mJpegSettings->is_jpeg_format) {
3910        // RAW image, no need to reprocess
3911        return false;
3912    }
3913
3914    if ((mJpegSettings->min_required_pp_mask > 0) ||
3915         isWNREnabled()) {
3916        // TODO: add for ZSL HDR later
3917        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3918        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3919        return true;
3920    }
3921    return needRotationReprocess();
3922}
3923
3924/*===========================================================================
3925 * FUNCTION   : addOnlineReprocChannel
3926 *
3927 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
3928 *              coming from input channel
3929 *
3930 * PARAMETERS :
3931 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3932 *
3933 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3934 *==========================================================================*/
3935QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3936              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3937{
3938    int32_t rc = NO_ERROR;
3939    QCamera3ReprocessChannel *pChannel = NULL;
3940    if (pInputChannel == NULL) {
3941        ALOGE("%s: input channel obj is NULL", __func__);
3942        return NULL;
3943    }
3944
3945    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3946            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
3947    if (NULL == pChannel) {
3948        ALOGE("%s: no mem for reprocess channel", __func__);
3949        return NULL;
3950    }
3951
3952    // Capture channel, only need snapshot and postview streams start together
3953    mm_camera_channel_attr_t attr;
3954    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
3955    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
3956    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
3957    rc = pChannel->initialize();
3958    if (rc != NO_ERROR) {
3959        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
3960        delete pChannel;
3961        return NULL;
3962    }
3963
3964    // pp feature config
3965    cam_pp_feature_config_t pp_config;
3966    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
3967    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
3968        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
3969        pp_config.sharpness = 10;
3970    }
3971
3972    if (isWNREnabled()) {
3973        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
3974        pp_config.denoise2d.denoise_enable = 1;
3975        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
3976    }
3977    if (needRotationReprocess()) {
3978        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
3979        int rotation = mJpegSettings->jpeg_orientation;
3980        if (rotation == 0) {
3981            pp_config.rotation = ROTATE_0;
3982        } else if (rotation == 90) {
3983            pp_config.rotation = ROTATE_90;
3984        } else if (rotation == 180) {
3985            pp_config.rotation = ROTATE_180;
3986        } else if (rotation == 270) {
3987            pp_config.rotation = ROTATE_270;
3988        }
3989    }
3990
3991   rc = pChannel->addReprocStreamsFromSource(pp_config,
3992                                             pInputChannel,
3993                                             mMetadataChannel);
3994
3995    if (rc != NO_ERROR) {
3996        delete pChannel;
3997        return NULL;
3998    }
3999    return pChannel;
4000}
4001
4002int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
4003{
4004    return gCamCapability[mCameraId]->min_num_pp_bufs;
4005}
4006
4007bool QCamera3HardwareInterface::isWNREnabled() {
4008    return gCamCapability[mCameraId]->isWnrSupported;
4009}
4010
4011}; //end namespace qcamera
4012