/* QCamera3HWI.cpp revision f7dca66435f16b52df005bd2fbb4fc0992c24c7d */
/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
/* Larger of two values; note both arguments are evaluated twice. */
#define MAX(a, b) ((a) > (b) ? (a) : (b))

/* Shorthand: mapped data pointer of buffer INDEX in a QCamera3 memory object. */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
/* Per-sensor capability tables; populated outside this chunk (presumably
 * when static metadata is first queried) -- TODO confirm against module code. */
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
/* Previously applied parameter batch. */
parm_buffer_t *prevSettings;
/* Cached static camera metadata, one entry per sensor. */
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

/* Serializes session bring-up/tear-down across all HAL instances; only a
 * single camera session may be active at a time (see openCamera()). */
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
/* Android effect-mode enum -> backend (mm-camera) effect enum. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

/* Android auto-white-balance mode -> backend white-balance enum. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

/* Android scene mode -> backend scene enum (STEADYPHOTO maps to ANTISHAKE). */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

/* Android AF mode -> backend focus enum (AF_MODE_OFF maps to FIXED focus). */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

/* Android AE antibanding mode -> backend antibanding enum. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

/* Android AE mode -> backend flash behavior (both OFF and ON AE modes
 * disable the flash; REDEYE falls back to plain AUTO flash). */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

/* Android explicit flash mode -> backend flash enum. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

/* Android face-detect mode -> backend face-detect enum (SIMPLE mode is
 * intentionally absent from this table). */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

/* Supported JPEG thumbnail sizes as flat (width, height) pairs; the
 * trailing 0,0 entry advertises "no thumbnail". */
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
/* camera3_device_ops vtable handed to the framework; each slot forwards to
 * the matching static trampoline on QCamera3HardwareInterface. Written with
 * the GNU "field: value" designated-initializer extension. */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface; wires up the
 *              camera3_device shell, initializes synchronization
 *              primitives and (optionally) grabs the power-hint module
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the camera3_device_t handed back to the framework.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): assumes gCamCapability[cameraId] was populated before
    // this constructor runs (presumably by the static-metadata query path)
    // -- confirm against module entry points.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Request templates are created lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Best effort: missing power module is logged but not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if (channel)
240            delete channel;
241        free (*it);
242    }
243
244    mPictureChannel = NULL;
245
246    if (mJpegSettings != NULL) {
247        free(mJpegSettings);
248        mJpegSettings = NULL;
249    }
250
251    /* Clean up all channels */
252    if (mCameraInitialized) {
253        mMetadataChannel->stop();
254        delete mMetadataChannel;
255        mMetadataChannel = NULL;
256        deinitParameters();
257    }
258
259    if (mCameraOpened)
260        closeCamera();
261
262    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
263        if (mDefaultMetadata[i])
264            free_camera_metadata(mDefaultMetadata[i]);
265
266    pthread_cond_destroy(&mRequestCond);
267
268    pthread_mutex_destroy(&mMutex);
269    ALOGV("%s: X", __func__);
270}
271
272/*===========================================================================
273 * FUNCTION   : openCamera
274 *
275 * DESCRIPTION: open camera
276 *
277 * PARAMETERS :
278 *   @hw_device  : double ptr for camera device struct
279 *
280 * RETURN     : int32_t type of status
281 *              NO_ERROR  -- success
282 *              none-zero failure code
283 *==========================================================================*/
284int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
285{
286    int rc = 0;
287    pthread_mutex_lock(&mCameraSessionLock);
288    if (mCameraSessionActive) {
289        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
290        pthread_mutex_unlock(&mCameraSessionLock);
291        return INVALID_OPERATION;
292    }
293
294    if (mCameraOpened) {
295        *hw_device = NULL;
296        return PERMISSION_DENIED;
297    }
298
299    rc = openCamera();
300    if (rc == 0) {
301        *hw_device = &mCameraDevice.common;
302        mCameraSessionActive = 1;
303    } else
304        *hw_device = NULL;
305
306#ifdef HAS_MULTIMEDIA_HINTS
307    if (rc == 0) {
308        if (m_pPowerModule) {
309            if (m_pPowerModule->powerHint) {
310                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
311                        (void *)"state=1");
312            }
313        }
314    }
315#endif
316    pthread_mutex_unlock(&mCameraSessionLock);
317    return rc;
318}
319
320/*===========================================================================
321 * FUNCTION   : openCamera
322 *
323 * DESCRIPTION: open camera
324 *
325 * PARAMETERS : none
326 *
327 * RETURN     : int32_t type of status
328 *              NO_ERROR  -- success
329 *              none-zero failure code
330 *==========================================================================*/
331int QCamera3HardwareInterface::openCamera()
332{
333    if (mCameraHandle) {
334        ALOGE("Failure: Camera already opened");
335        return ALREADY_EXISTS;
336    }
337    mCameraHandle = camera_open(mCameraId);
338    if (!mCameraHandle) {
339        ALOGE("camera_open failed.");
340        return UNKNOWN_ERROR;
341    }
342
343    mCameraOpened = true;
344
345    return NO_ERROR;
346}
347
348/*===========================================================================
349 * FUNCTION   : closeCamera
350 *
351 * DESCRIPTION: close camera
352 *
353 * PARAMETERS : none
354 *
355 * RETURN     : int32_t type of status
356 *              NO_ERROR  -- success
357 *              none-zero failure code
358 *==========================================================================*/
359int QCamera3HardwareInterface::closeCamera()
360{
361    int rc = NO_ERROR;
362
363    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
364    mCameraHandle = NULL;
365    mCameraOpened = false;
366
367#ifdef HAS_MULTIMEDIA_HINTS
368    if (rc == NO_ERROR) {
369        if (m_pPowerModule) {
370            if (m_pPowerModule->powerHint) {
371                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
372                        (void *)"state=0");
373            }
374        }
375    }
376#endif
377
378    return rc;
379}
380
/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions; sets up the
 *              parameter heap and the metadata channel
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     : 0 on success, negative error code on failure (errors are
 *              unwound via the err1/err2/err3 cleanup ladder)
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
       goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    // NOTE(review): this NULL check is dead code -- plain operator new
    // throws std::bad_alloc rather than returning NULL (and a throw here
    // would leak mMutex). Kept as-is; harmless.
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    // NOTE(review): set outside the lock; presumably benign because the
    // framework serializes initialize() with other device calls -- confirm.
    mCameraInitialized = true;
    return 0;

err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
434
/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
 *              and output streams. Existing streams are marked INVALID,
 *              re-validated against the incoming list, and channels are
 *              (re)created as needed; the resulting stream table is pushed
 *              to the backend as CAM_INTF_META_STREAM_INFO.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     : 0 on success, BAD_VALUE / -ENOMEM on failure
 *
 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    mIsZslMode = false;
    pthread_mutex_lock(&mMutex);
    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    cam_stream_size_info_t stream_config_info;

    /* first invalidate all the streams in the mStreamList
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }

    /* Walk the requested streams: re-validate known ones (their channels
     * are deleted for reconfiguration), record new ones, and note the
     * input and JPEG streams along the way. */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    //mMetadataChannel->stop();

    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        stream_config_info.stream_sizes[i].width = newStream->width;
        stream_config_info.stream_sizes[i].height = newStream->height;
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
            //for zsl stream the size is jpeg size
            stream_config_info.stream_sizes[i].width = jpegStream->width;
            stream_config_info.stream_sizes[i].height = jpegStream->height;
            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
        } else {
           //for non zsl streams find out the format
           switch (newStream->format) {
           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
              {
                 // Encoder-bound gralloc buffers become the video stream;
                 // everything else implementation-defined is preview.
                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
                 } else {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
                 }
              }
              break;
           case HAL_PIXEL_FORMAT_YCbCr_420_888:
              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
              break;
           case HAL_PIXEL_FORMAT_BLOB:
              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
              break;
           default:
              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
              break;
           }
        }
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // First publish the gralloc usage flags the HAL needs on this
            // stream's buffers.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage =
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else
                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    // A bidirectional stream paired with a JPEG stream is the
                    // ZSL case: size the channel to the JPEG dimensions.
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework wont*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        // Reset the pending-buffer count for every surviving stream.
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
    mPendingRequestsList.clear();

    /*flush the metadata list*/
    /* NOTE(review): "m = erase(m)" followed by the loop's "m++" skips every
     * other entry (same pattern exists in the destructor) -- this leaks the
     * skipped buffers; should be a consume-from-front loop. */
    if (!mStoredMetadataList.empty()) {
        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
              m != mStoredMetadataList.end(); m++) {
            mMetadataChannel->bufDone(m->meta_buf);
            free(m->meta_buf);
            m = mStoredMetadataList.erase(m);
        }
    }
    int32_t hal_version = CAM_HAL_V3;
    stream_config_info.num_streams = streamList->num_streams;

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));

    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);

    // Push the assembled per-stream size/type table to the backend.
    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
                sizeof(stream_config_info), &stream_config_info);

    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);

    mFirstRequest = true;

    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}
722
723/*===========================================================================
724 * FUNCTION   : validateCaptureRequest
725 *
726 * DESCRIPTION: validate a capture request from camera service
727 *
728 * PARAMETERS :
729 *   @request : request from framework to process
730 *
731 * RETURN     :
732 *
733 *==========================================================================*/
734int QCamera3HardwareInterface::validateCaptureRequest(
735                    camera3_capture_request_t *request)
736{
737    ssize_t idx = 0;
738    const camera3_stream_buffer_t *b;
739    CameraMetadata meta;
740
741    /* Sanity check the request */
742    if (request == NULL) {
743        ALOGE("%s: NULL capture request", __func__);
744        return BAD_VALUE;
745    }
746
747    uint32_t frameNumber = request->frame_number;
748    if (request->input_buffer != NULL &&
749            request->input_buffer->stream != mInputStream) {
750        ALOGE("%s: Request %d: Input buffer not from input stream!",
751                __FUNCTION__, frameNumber);
752        return BAD_VALUE;
753    }
754    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
755        ALOGE("%s: Request %d: No output buffers provided!",
756                __FUNCTION__, frameNumber);
757        return BAD_VALUE;
758    }
759    if (request->input_buffer != NULL) {
760        b = request->input_buffer;
761        QCamera3Channel *channel =
762            static_cast<QCamera3Channel*>(b->stream->priv);
763        if (channel == NULL) {
764            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
765                    __func__, frameNumber, idx);
766            return BAD_VALUE;
767        }
768        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
769            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
770                    __func__, frameNumber, idx);
771            return BAD_VALUE;
772        }
773        if (b->release_fence != -1) {
774            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
775                    __func__, frameNumber, idx);
776            return BAD_VALUE;
777        }
778        if (b->buffer == NULL) {
779            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
780                    __func__, frameNumber, idx);
781            return BAD_VALUE;
782        }
783    }
784
785    // Validate all buffers
786    b = request->output_buffers;
787    do {
788        QCamera3Channel *channel =
789                static_cast<QCamera3Channel*>(b->stream->priv);
790        if (channel == NULL) {
791            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
792                    __func__, frameNumber, idx);
793            return BAD_VALUE;
794        }
795        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
796            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
797                    __func__, frameNumber, idx);
798            return BAD_VALUE;
799        }
800        if (b->release_fence != -1) {
801            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
802                    __func__, frameNumber, idx);
803            return BAD_VALUE;
804        }
805        if (b->buffer == NULL) {
806            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
807                    __func__, frameNumber, idx);
808            return BAD_VALUE;
809        }
810        idx++;
811        b = request->output_buffers + idx;
812    } while (idx < (ssize_t)request->num_output_buffers);
813
814    return NO_ERROR;
815}
816
817/*===========================================================================
818 * FUNCTION   : deriveMinFrameDuration
819 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
821 *              on currently configured streams.
822 *
823 * PARAMETERS : NONE
824 *
825 * RETURN     : NONE
826 *
827 *==========================================================================*/
828void QCamera3HardwareInterface::deriveMinFrameDuration()
829{
830    int32_t maxJpegDimension, maxProcessedDimension;
831
832    maxJpegDimension = 0;
833    maxProcessedDimension = 0;
834
835    // Figure out maximum jpeg, processed, and raw dimensions
836    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
837        it != mStreamInfo.end(); it++) {
838
839        // Input stream doesn't have valid stream_type
840        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
841            continue;
842
843        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
844        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
845            if (dimension > maxJpegDimension)
846                maxJpegDimension = dimension;
847        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
848            if (dimension > maxProcessedDimension)
849                maxProcessedDimension = dimension;
850        }
851    }
852
853    //Assume all jpeg dimensions are in processed dimensions.
854    if (maxJpegDimension > maxProcessedDimension)
855        maxProcessedDimension = maxJpegDimension;
856
857    //Find minimum durations for processed, jpeg, and raw
858    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
859    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
860        if (maxProcessedDimension ==
861            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
862            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
863            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
864            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
865            break;
866        }
867    }
868}
869
870/*===========================================================================
871 * FUNCTION   : getMinFrameDuration
872 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
879 *
880 *==========================================================================*/
881int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
882{
883    bool hasJpegStream = false;
884    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
885        const camera3_stream_t *stream = request->output_buffers[i].stream;
886        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
887            hasJpegStream = true;
888    }
889
890    if (!hasJpegStream)
891        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
892    else
893        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
894}
895
896/*===========================================================================
897 * FUNCTION   : registerStreamBuffers
898 *
899 * DESCRIPTION: Register buffers for a given stream with the HAL device.
900 *
901 * PARAMETERS :
902 *   @stream_list : streams to be configured
903 *
904 * RETURN     :
905 *
906 *==========================================================================*/
907int QCamera3HardwareInterface::registerStreamBuffers(
908        const camera3_stream_buffer_set_t *buffer_set)
909{
910    int rc = 0;
911
912    pthread_mutex_lock(&mMutex);
913
914    if (buffer_set == NULL) {
915        ALOGE("%s: Invalid buffer_set parameter.", __func__);
916        pthread_mutex_unlock(&mMutex);
917        return -EINVAL;
918    }
919    if (buffer_set->stream == NULL) {
920        ALOGE("%s: Invalid stream parameter.", __func__);
921        pthread_mutex_unlock(&mMutex);
922        return -EINVAL;
923    }
924    if (buffer_set->num_buffers < 1) {
925        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
926        pthread_mutex_unlock(&mMutex);
927        return -EINVAL;
928    }
929    if (buffer_set->buffers == NULL) {
930        ALOGE("%s: Invalid buffers parameter.", __func__);
931        pthread_mutex_unlock(&mMutex);
932        return -EINVAL;
933    }
934
935    camera3_stream_t *stream = buffer_set->stream;
936    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
937
938    //set the buffer_set in the mStreamInfo array
939    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
940            it != mStreamInfo.end(); it++) {
941        if ((*it)->stream == stream) {
942            uint32_t numBuffers = buffer_set->num_buffers;
943            (*it)->buffer_set.stream = buffer_set->stream;
944            (*it)->buffer_set.num_buffers = numBuffers;
945            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
946            if ((*it)->buffer_set.buffers == NULL) {
947                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
948                pthread_mutex_unlock(&mMutex);
949                return -ENOMEM;
950            }
951            for (size_t j = 0; j < numBuffers; j++){
952                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
953            }
954            (*it)->registered = 1;
955        }
956    }
957    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
958    if (rc < 0) {
959        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
960        pthread_mutex_unlock(&mMutex);
961        return -ENODEV;
962    }
963
964    pthread_mutex_unlock(&mMutex);
965    return NO_ERROR;
966}
967
968/*===========================================================================
969 * FUNCTION   : processCaptureRequest
970 *
971 * DESCRIPTION: process a capture request from camera service
972 *
973 * PARAMETERS :
974 *   @request : request from framework to process
975 *
976 * RETURN     :
977 *
978 *==========================================================================*/
979int QCamera3HardwareInterface::processCaptureRequest(
980                    camera3_capture_request_t *request)
981{
982    int rc = NO_ERROR;
983    int32_t request_id;
984    CameraMetadata meta;
985    MetadataBufferInfo reproc_meta;
986    int queueMetadata = 0;
987
988    pthread_mutex_lock(&mMutex);
989
990    rc = validateCaptureRequest(request);
991    if (rc != NO_ERROR) {
992        ALOGE("%s: incoming request is not valid", __func__);
993        pthread_mutex_unlock(&mMutex);
994        return rc;
995    }
996
997    meta = request->settings;
998
999    // For first capture request, send capture intent, and
1000    // stream on all streams
1001    if (mFirstRequest) {
1002
1003        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1004            int32_t hal_version = CAM_HAL_V3;
1005            uint8_t captureIntent =
1006                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1007
1008            memset(mParameters, 0, sizeof(parm_buffer_t));
1009            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1010            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1011                sizeof(hal_version), &hal_version);
1012            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1013                sizeof(captureIntent), &captureIntent);
1014            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1015                mParameters);
1016        }
1017
1018        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1019            it != mStreamInfo.end(); it++) {
1020            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1021            channel->start();
1022        }
1023    }
1024
1025    uint32_t frameNumber = request->frame_number;
1026    uint32_t streamTypeMask = 0;
1027
1028    if (meta.exists(ANDROID_REQUEST_ID)) {
1029        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1030        mCurrentRequestId = request_id;
1031        ALOGV("%s: Received request with id: %d",__func__, request_id);
1032    } else if (mFirstRequest || mCurrentRequestId == -1){
1033        ALOGE("%s: Unable to find request id field, \
1034                & no previous id available", __func__);
1035        return NAME_NOT_FOUND;
1036    } else {
1037        ALOGV("%s: Re-using old request id", __func__);
1038        request_id = mCurrentRequestId;
1039    }
1040
1041    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1042                                    __func__, __LINE__,
1043                                    request->num_output_buffers,
1044                                    request->input_buffer,
1045                                    frameNumber);
1046    // Acquire all request buffers first
1047    int blob_request = 0;
1048    for (size_t i = 0; i < request->num_output_buffers; i++) {
1049        const camera3_stream_buffer_t& output = request->output_buffers[i];
1050        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1051        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1052
1053        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1054        //Call function to store local copy of jpeg data for encode params.
1055            blob_request = 1;
1056            rc = getJpegSettings(request->settings);
1057            if (rc < 0) {
1058                ALOGE("%s: failed to get jpeg parameters", __func__);
1059                pthread_mutex_unlock(&mMutex);
1060                return rc;
1061            }
1062        }
1063
1064        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1065        if (rc != OK) {
1066            ALOGE("%s: fence wait failed %d", __func__, rc);
1067            pthread_mutex_unlock(&mMutex);
1068            return rc;
1069        }
1070        streamTypeMask |= channel->getStreamTypeMask();
1071    }
1072
1073    rc = setFrameParameters(request, streamTypeMask);
1074    if (rc < 0) {
1075        ALOGE("%s: fail to set frame parameters", __func__);
1076        pthread_mutex_unlock(&mMutex);
1077        return rc;
1078    }
1079
1080    /* Update pending request list and pending buffers map */
1081    PendingRequestInfo pendingRequest;
1082    pendingRequest.frame_number = frameNumber;
1083    pendingRequest.num_buffers = request->num_output_buffers;
1084    pendingRequest.request_id = request_id;
1085    pendingRequest.blob_request = blob_request;
1086    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1087
1088    for (size_t i = 0; i < request->num_output_buffers; i++) {
1089        RequestedBufferInfo requestedBuf;
1090        requestedBuf.stream = request->output_buffers[i].stream;
1091        requestedBuf.buffer = NULL;
1092        pendingRequest.buffers.push_back(requestedBuf);
1093
1094        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1095    }
1096    mPendingRequestsList.push_back(pendingRequest);
1097
1098    // Notify metadata channel we receive a request
1099    mMetadataChannel->request(NULL, frameNumber);
1100
1101    // Call request on other streams
1102    for (size_t i = 0; i < request->num_output_buffers; i++) {
1103        const camera3_stream_buffer_t& output = request->output_buffers[i];
1104        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1105        mm_camera_buf_def_t *pInputBuffer = NULL;
1106
1107        if (channel == NULL) {
1108            ALOGE("%s: invalid channel pointer for stream", __func__);
1109            continue;
1110        }
1111
1112        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1113            QCamera3RegularChannel* inputChannel = NULL;
1114            if(request->input_buffer != NULL){
1115                //Try to get the internal format
1116                inputChannel = (QCamera3RegularChannel*)
1117                    request->input_buffer->stream->priv;
1118                if(inputChannel == NULL ){
1119                    ALOGE("%s: failed to get input channel handle", __func__);
1120                } else {
1121                    pInputBuffer =
1122                        inputChannel->getInternalFormatBuffer(
1123                                request->input_buffer->buffer);
1124                    ALOGD("%s: Input buffer dump",__func__);
1125                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1126                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1127                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1128                    ALOGD("Handle:%p", request->input_buffer->buffer);
1129                    //TODO: need to get corresponding metadata and send it to pproc
1130                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1131                         m != mStoredMetadataList.end(); m++) {
1132                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1133                            reproc_meta.meta_buf = m->meta_buf;
1134                            queueMetadata = 1;
1135                            break;
1136                        }
1137                    }
1138                }
1139            }
1140            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1141                            pInputBuffer,(QCamera3Channel*)inputChannel);
1142            if (queueMetadata) {
1143                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1144            }
1145        } else {
1146            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1147                __LINE__, output.buffer, frameNumber);
1148            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1149                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1150                     m != mStoredMetadataList.end(); m++) {
1151                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1152                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1153                            mMetadataChannel->bufDone(m->meta_buf);
1154                            free(m->meta_buf);
1155                            m = mStoredMetadataList.erase(m);
1156                            break;
1157                        }
1158                   }
1159                }
1160            }
1161            rc = channel->request(output.buffer, frameNumber);
1162        }
1163        if (rc < 0)
1164            ALOGE("%s: request failed", __func__);
1165    }
1166
1167    mFirstRequest = false;
1168
1169    //Block on conditional variable
1170    mPendingRequest = 1;
1171    while (mPendingRequest == 1) {
1172        pthread_cond_wait(&mRequestCond, &mMutex);
1173    }
1174
1175    pthread_mutex_unlock(&mMutex);
1176    return rc;
1177}
1178
1179/*===========================================================================
1180 * FUNCTION   : getMetadataVendorTagOps
1181 *
1182 * DESCRIPTION:
1183 *
1184 * PARAMETERS :
1185 *
1186 *
1187 * RETURN     :
1188 *==========================================================================*/
1189void QCamera3HardwareInterface::getMetadataVendorTagOps(
1190                    vendor_tag_query_ops_t* /*ops*/)
1191{
1192    /* Enable locks when we eventually add Vendor Tags */
1193    /*
1194    pthread_mutex_lock(&mMutex);
1195
1196    pthread_mutex_unlock(&mMutex);
1197    */
1198    return;
1199}
1200
1201/*===========================================================================
1202 * FUNCTION   : dump
1203 *
1204 * DESCRIPTION:
1205 *
1206 * PARAMETERS :
1207 *
1208 *
1209 * RETURN     :
1210 *==========================================================================*/
1211void QCamera3HardwareInterface::dump(int /*fd*/)
1212{
1213    /*Enable lock when we implement this function*/
1214    /*
1215    pthread_mutex_lock(&mMutex);
1216
1217    pthread_mutex_unlock(&mMutex);
1218    */
1219    return;
1220}
1221
1222/*===========================================================================
1223 * FUNCTION   : flush
1224 *
1225 * DESCRIPTION:
1226 *
1227 * PARAMETERS :
1228 *
1229 *
1230 * RETURN     :
1231 *==========================================================================*/
1232int QCamera3HardwareInterface::flush()
1233{
1234    /*Enable lock when we implement this function*/
1235    /*
1236    pthread_mutex_lock(&mMutex);
1237
1238    pthread_mutex_unlock(&mMutex);
1239    */
1240    return 0;
1241}
1242
1243/*===========================================================================
1244 * FUNCTION   : captureResultCb
1245 *
1246 * DESCRIPTION: Callback handler for all capture result
1247 *              (streams, as well as metadata)
1248 *
1249 * PARAMETERS :
1250 *   @metadata : metadata information
1251 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1252 *               NULL if metadata.
1253 *
1254 * RETURN     : NONE
1255 *==========================================================================*/
1256void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1257                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1258{
1259    pthread_mutex_lock(&mMutex);
1260
1261    if (metadata_buf) {
1262        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1263        int32_t frame_number_valid = *(int32_t *)
1264            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1265        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1266            CAM_INTF_META_PENDING_REQUESTS, metadata);
1267        uint32_t frame_number = *(uint32_t *)
1268            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1269        const struct timeval *tv = (const struct timeval *)
1270            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1271        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1272            tv->tv_usec * NSEC_PER_USEC;
1273
1274        if (!frame_number_valid) {
1275            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1276            mMetadataChannel->bufDone(metadata_buf);
1277            goto done_metadata;
1278        }
1279        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1280                frame_number, capture_time);
1281
1282        // Go through the pending requests info and send shutter/results to frameworks
1283        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1284                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1285            camera3_capture_result_t result;
1286            camera3_notify_msg_t notify_msg;
1287            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1288
1289            // Flush out all entries with less or equal frame numbers.
1290
1291            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1292            //Right now it's the same as metadata timestamp
1293
1294            //TODO: When there is metadata drop, how do we derive the timestamp of
1295            //dropped frames? For now, we fake the dropped timestamp by substracting
1296            //from the reported timestamp
1297            nsecs_t current_capture_time = capture_time -
1298                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1299
1300            // Send shutter notify to frameworks
1301            notify_msg.type = CAMERA3_MSG_SHUTTER;
1302            notify_msg.message.shutter.frame_number = i->frame_number;
1303            notify_msg.message.shutter.timestamp = current_capture_time;
1304            mCallbackOps->notify(mCallbackOps, &notify_msg);
1305            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1306                    i->frame_number, capture_time);
1307
1308            // Send empty metadata with already filled buffers for dropped metadata
1309            // and send valid metadata with already filled buffers for current metadata
1310            if (i->frame_number < frame_number) {
1311                CameraMetadata dummyMetadata;
1312                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1313                        &current_capture_time, 1);
1314                dummyMetadata.update(ANDROID_REQUEST_ID,
1315                        &(i->request_id), 1);
1316                result.result = dummyMetadata.release();
1317            } else {
1318                result.result = translateCbMetadataToResultMetadata(metadata,
1319                        current_capture_time, i->request_id);
1320                if (mIsZslMode) {
1321                   int found_metadata = 0;
1322                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1323                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1324                        j != i->buffers.end(); j++) {
1325                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1326                         //check if corresp. zsl already exists in the stored metadata list
1327                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1328                               m != mStoredMetadataList.begin(); m++) {
1329                            if (m->frame_number == frame_number) {
1330                               m->meta_buf = metadata_buf;
1331                               found_metadata = 1;
1332                               break;
1333                            }
1334                         }
1335                         if (!found_metadata) {
1336                            MetadataBufferInfo store_meta_info;
1337                            store_meta_info.meta_buf = metadata_buf;
1338                            store_meta_info.frame_number = frame_number;
1339                            mStoredMetadataList.push_back(store_meta_info);
1340                            found_metadata = 1;
1341                         }
1342                      }
1343                   }
1344                   if (!found_metadata) {
1345                       if (!i->input_buffer_present && i->blob_request) {
1346                          //livesnapshot or fallback non-zsl snapshot case
1347                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1348                                j != i->buffers.end(); j++){
1349                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1350                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1351                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1352                                 break;
1353                              }
1354                         }
1355                       } else {
1356                            //return the metadata immediately
1357                            mMetadataChannel->bufDone(metadata_buf);
1358                            free(metadata_buf);
1359                       }
1360                   }
1361               } else if (!mIsZslMode && i->blob_request) {
1362                   //If it is a blob request then send the metadata to the picture channel
1363                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1364               } else {
1365                   // Return metadata buffer
1366                   mMetadataChannel->bufDone(metadata_buf);
1367                   free(metadata_buf);
1368               }
1369
1370            }
1371            if (!result.result) {
1372                ALOGE("%s: metadata is NULL", __func__);
1373            }
1374            result.frame_number = i->frame_number;
1375            result.num_output_buffers = 0;
1376            result.output_buffers = NULL;
1377            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1378                    j != i->buffers.end(); j++) {
1379                if (j->buffer) {
1380                    result.num_output_buffers++;
1381                }
1382            }
1383
1384            if (result.num_output_buffers > 0) {
1385                camera3_stream_buffer_t *result_buffers =
1386                    new camera3_stream_buffer_t[result.num_output_buffers];
1387                if (!result_buffers) {
1388                    ALOGE("%s: Fatal error: out of memory", __func__);
1389                }
1390                size_t result_buffers_idx = 0;
1391                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1392                        j != i->buffers.end(); j++) {
1393                    if (j->buffer) {
1394                        result_buffers[result_buffers_idx++] = *(j->buffer);
1395                        free(j->buffer);
1396                        j->buffer = NULL;
1397                        mPendingBuffersMap.editValueFor(j->stream)--;
1398                    }
1399                }
1400                result.output_buffers = result_buffers;
1401
1402                mCallbackOps->process_capture_result(mCallbackOps, &result);
1403                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1404                        __func__, result.frame_number, current_capture_time);
1405                free_camera_metadata((camera_metadata_t *)result.result);
1406                delete[] result_buffers;
1407            } else {
1408                mCallbackOps->process_capture_result(mCallbackOps, &result);
1409                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1410                        __func__, result.frame_number, current_capture_time);
1411                free_camera_metadata((camera_metadata_t *)result.result);
1412            }
1413            // erase the element from the list
1414            i = mPendingRequestsList.erase(i);
1415        }
1416
1417
1418done_metadata:
1419        bool max_buffers_dequeued = false;
1420        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1421            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1422            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1423            if (queued_buffers == stream->max_buffers) {
1424                max_buffers_dequeued = true;
1425                break;
1426            }
1427        }
1428        if (!max_buffers_dequeued && !pending_requests) {
1429            // Unblock process_capture_request
1430            mPendingRequest = 0;
1431            pthread_cond_signal(&mRequestCond);
1432        }
1433    } else {
1434        // If the frame number doesn't exist in the pending request list,
1435        // directly send the buffer to the frameworks, and update pending buffers map
1436        // Otherwise, book-keep the buffer.
1437        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1438        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1439            i++;
1440        }
1441        if (i == mPendingRequestsList.end()) {
1442            // Verify all pending requests frame_numbers are greater
1443            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1444                    j != mPendingRequestsList.end(); j++) {
1445                if (j->frame_number < frame_number) {
1446                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1447                            __func__, j->frame_number, frame_number);
1448                }
1449            }
1450            camera3_capture_result_t result;
1451            result.result = NULL;
1452            result.frame_number = frame_number;
1453            result.num_output_buffers = 1;
1454            result.output_buffers = buffer;
1455            ALOGV("%s: result frame_number = %d, buffer = %p",
1456                    __func__, frame_number, buffer);
1457            mPendingBuffersMap.editValueFor(buffer->stream)--;
1458            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1459                int found = 0;
1460                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1461                      k != mStoredMetadataList.end(); k++) {
1462                    if (k->frame_number == frame_number) {
1463                        k->zsl_buf_hdl = buffer->buffer;
1464                        found = 1;
1465                        break;
1466                    }
1467                }
1468                if (!found) {
1469                   MetadataBufferInfo meta_info;
1470                   meta_info.frame_number = frame_number;
1471                   meta_info.zsl_buf_hdl = buffer->buffer;
1472                   mStoredMetadataList.push_back(meta_info);
1473                }
1474            }
1475            mCallbackOps->process_capture_result(mCallbackOps, &result);
1476        } else {
1477            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1478                    j != i->buffers.end(); j++) {
1479                if (j->stream == buffer->stream) {
1480                    if (j->buffer != NULL) {
1481                        ALOGE("%s: Error: buffer is already set", __func__);
1482                    } else {
1483                        j->buffer = (camera3_stream_buffer_t *)malloc(
1484                                sizeof(camera3_stream_buffer_t));
1485                        *(j->buffer) = *buffer;
1486                        ALOGV("%s: cache buffer %p at result frame_number %d",
1487                                __func__, buffer, frame_number);
1488                    }
1489                }
1490            }
1491        }
1492    }
1493    pthread_mutex_unlock(&mMutex);
1494    return;
1495}
1496
1497/*===========================================================================
1498 * FUNCTION   : translateCbMetadataToResultMetadata
1499 *
1500 * DESCRIPTION:
1501 *
1502 * PARAMETERS :
1503 *   @metadata : metadata information from callback
1504 *
1505 * RETURN     : camera_metadata_t*
1506 *              metadata in a format specified by fwk
1507 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    // Translates the backend (mm-camera) metadata buffer into the
    // camera_metadata_t format the camera3 framework expects.
    // Side effects: also caches exposure time and ISO into
    // mMetadataResponse for later (JPEG/EXIF) use.
    // Ownership of the returned camera_metadata_t passes to the caller
    // (camMetadata.release() relinquishes it).
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Timestamp and request id come from the caller, not the metadata buffer.
    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    // VLAs sized by the detected face count.
    // NOTE(review): when numFaces == 0 these are zero-length VLAs, which is
    // technically UB in C++ even though the update() calls below are guarded.
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;  // j: rectangle write offset (4/face), k: landmark offset (6/face)
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        // weight = -1: plain rect conversion, no weight element written.
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    // From here on: one POINTER_OF extraction per backend tag, copied into
    // the corresponding ANDROID_* result tag.
    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    // 5 elements: x_min, y_min, x_max, y_max, weight (see convertToRegions).
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);

    uint8_t *ae_state =
            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);

    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    // Crop region stays in [left, top, width, height] form (not x_max/y_max).
    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    // Cached for EXIF generation in addition to being reported to the fwk.
    mMetadataResponse.exposure_time = *sensorExpTime;
    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

    int64_t  *sensorFameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);

    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
    // Cached for EXIF generation as well.
    mMetadataResponse.iso_speed = *sensorSensitivity;
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    // Backend face-detect mode value is remapped to the framework enum.
    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
        *faceDetectMode);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    // Shading map dimensions come from the static capability table;
    // 4 floats (R, Gr, Gb, B gains) per map cell.
    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                       (float*)lensShadingMap->lens_shading,
                       4*map_width*map_height);

    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);

    // 3x3 rational color transform matrices.
    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);

    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                       predColorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);

    uint8_t *blackLevelLock = (uint8_t*)
        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);

    uint8_t *sceneFlicker = (uint8_t*)
        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);


    // Caller owns the released buffer from here on.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
1755
1756/*===========================================================================
1757 * FUNCTION   : convertToRegions
1758 *
1759 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1760 *
1761 * PARAMETERS :
1762 *   @rect   : cam_rect_t struct to convert
1763 *   @region : int32_t destination array
1764 *   @weight : if we are converting from cam_area_t, weight is valid
1765 *             else weight = -1
1766 *
1767 *==========================================================================*/
1768void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1769    region[0] = rect.left;
1770    region[1] = rect.top;
1771    region[2] = rect.left + rect.width;
1772    region[3] = rect.top + rect.height;
1773    if (weight > -1) {
1774        region[4] = weight;
1775    }
1776}
1777
1778/*===========================================================================
1779 * FUNCTION   : convertFromRegions
1780 *
1781 * DESCRIPTION: helper method to convert from array to cam_rect_t
1782 *
1783 * PARAMETERS :
1784 *   @rect   : cam_rect_t struct to convert
1785 *   @region : int32_t destination array
1786 *   @weight : if we are converting from cam_area_t, weight is valid
1787 *             else weight = -1
1788 *
1789 *==========================================================================*/
1790void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1791                                                   const camera_metadata_t *settings,
1792                                                   uint32_t tag){
1793    CameraMetadata frame_settings;
1794    frame_settings = settings;
1795    int32_t x_min = frame_settings.find(tag).data.i32[0];
1796    int32_t y_min = frame_settings.find(tag).data.i32[1];
1797    int32_t x_max = frame_settings.find(tag).data.i32[2];
1798    int32_t y_max = frame_settings.find(tag).data.i32[3];
1799    roi->weight = frame_settings.find(tag).data.i32[4];
1800    roi->rect.left = x_min;
1801    roi->rect.top = y_min;
1802    roi->rect.width = x_max - x_min;
1803    roi->rect.height = y_max - y_min;
1804}
1805
1806/*===========================================================================
1807 * FUNCTION   : resetIfNeededROI
1808 *
1809 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1810 *              crop region
1811 *
1812 * PARAMETERS :
1813 *   @roi       : cam_area_t struct to resize
1814 *   @scalerCropRegion : cam_crop_region_t region to compare against
1815 *
1816 *
1817 *==========================================================================*/
1818bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1819                                                 const cam_crop_region_t* scalerCropRegion)
1820{
1821    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1822    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1823    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1824    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1825    if ((roi_x_max < scalerCropRegion->left) ||
1826        (roi_y_max < scalerCropRegion->top)  ||
1827        (roi->rect.left > crop_x_max) ||
1828        (roi->rect.top > crop_y_max)){
1829        return false;
1830    }
1831    if (roi->rect.left < scalerCropRegion->left) {
1832        roi->rect.left = scalerCropRegion->left;
1833    }
1834    if (roi->rect.top < scalerCropRegion->top) {
1835        roi->rect.top = scalerCropRegion->top;
1836    }
1837    if (roi_x_max > crop_x_max) {
1838        roi_x_max = crop_x_max;
1839    }
1840    if (roi_y_max > crop_y_max) {
1841        roi_y_max = crop_y_max;
1842    }
1843    roi->rect.width = roi_x_max - roi->rect.left;
1844    roi->rect.height = roi_y_max - roi->rect.top;
1845    return true;
1846}
1847
1848/*===========================================================================
1849 * FUNCTION   : convertLandmarks
1850 *
1851 * DESCRIPTION: helper method to extract the landmarks from face detection info
1852 *
1853 * PARAMETERS :
 *   @face   : cam_face_detection_info_t face info to extract landmarks from
1855 *   @landmarks : int32_t destination array
1856 *
1857 *
1858 *==========================================================================*/
1859void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1860{
1861    landmarks[0] = face.left_eye_center.x;
1862    landmarks[1] = face.left_eye_center.y;
1863    landmarks[2] = face.right_eye_center.y;
1864    landmarks[3] = face.right_eye_center.y;
1865    landmarks[4] = face.mouth_center.x;
1866    landmarks[5] = face.mouth_center.y;
1867}
1868
1869#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1870/*===========================================================================
1871 * FUNCTION   : initCapabilities
1872 *
1873 * DESCRIPTION: initialize camera capabilities in static data struct
1874 *
1875 * PARAMETERS :
1876 *   @cameraId  : camera Id
1877 *
1878 * RETURN     : int32_t type of status
1879 *              NO_ERROR  -- success
1880 *              none-zero failure code
1881 *==========================================================================*/
1882int QCamera3HardwareInterface::initCapabilities(int cameraId)
1883{
1884    int rc = 0;
1885    mm_camera_vtbl_t *cameraHandle = NULL;
1886    QCamera3HeapMemory *capabilityHeap = NULL;
1887
1888    cameraHandle = camera_open(cameraId);
1889    if (!cameraHandle) {
1890        ALOGE("%s: camera_open failed", __func__);
1891        rc = -1;
1892        goto open_failed;
1893    }
1894
1895    capabilityHeap = new QCamera3HeapMemory();
1896    if (capabilityHeap == NULL) {
1897        ALOGE("%s: creation of capabilityHeap failed", __func__);
1898        goto heap_creation_failed;
1899    }
1900    /* Allocate memory for capability buffer */
1901    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1902    if(rc != OK) {
1903        ALOGE("%s: No memory for cappability", __func__);
1904        goto allocate_failed;
1905    }
1906
1907    /* Map memory for capability buffer */
1908    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1909    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1910                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1911                                capabilityHeap->getFd(0),
1912                                sizeof(cam_capability_t));
1913    if(rc < 0) {
1914        ALOGE("%s: failed to map capability buffer", __func__);
1915        goto map_failed;
1916    }
1917
1918    /* Query Capability */
1919    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1920    if(rc < 0) {
1921        ALOGE("%s: failed to query capability",__func__);
1922        goto query_failed;
1923    }
1924    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1925    if (!gCamCapability[cameraId]) {
1926        ALOGE("%s: out of memory", __func__);
1927        goto query_failed;
1928    }
1929    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1930                                        sizeof(cam_capability_t));
1931    rc = 0;
1932
1933query_failed:
1934    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1935                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1936map_failed:
1937    capabilityHeap->deallocate();
1938allocate_failed:
1939    delete capabilityHeap;
1940heap_creation_failed:
1941    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1942    cameraHandle = NULL;
1943open_failed:
1944    return rc;
1945}
1946
1947/*===========================================================================
1948 * FUNCTION   : initParameters
1949 *
1950 * DESCRIPTION: initialize camera parameters
1951 *
1952 * PARAMETERS :
1953 *
1954 * RETURN     : int32_t type of status
1955 *              NO_ERROR  -- success
1956 *              none-zero failure code
1957 *==========================================================================*/
1958int QCamera3HardwareInterface::initParameters()
1959{
1960    int rc = 0;
1961
1962    //Allocate Set Param Buffer
1963    mParamHeap = new QCamera3HeapMemory();
1964    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1965    if(rc != OK) {
1966        rc = NO_MEMORY;
1967        ALOGE("Failed to allocate SETPARM Heap memory");
1968        delete mParamHeap;
1969        mParamHeap = NULL;
1970        return rc;
1971    }
1972
1973    //Map memory for parameters buffer
1974    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1975            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1976            mParamHeap->getFd(0),
1977            sizeof(parm_buffer_t));
1978    if(rc < 0) {
1979        ALOGE("%s:failed to map SETPARM buffer",__func__);
1980        rc = FAILED_TRANSACTION;
1981        mParamHeap->deallocate();
1982        delete mParamHeap;
1983        mParamHeap = NULL;
1984        return rc;
1985    }
1986
1987    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1988    return rc;
1989}
1990
1991/*===========================================================================
1992 * FUNCTION   : deinitParameters
1993 *
1994 * DESCRIPTION: de-initialize camera parameters
1995 *
1996 * PARAMETERS :
1997 *
1998 * RETURN     : NONE
1999 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tear down the SETPARM buffer created by initParameters().
    // Order matters: the buffer must be unmapped from the backend
    // before its backing heap is deallocated.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap just freed; clear it.
    mParameters = NULL;
}
2011
2012/*===========================================================================
2013 * FUNCTION   : calcMaxJpegSize
2014 *
2015 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2016 *
2017 * PARAMETERS :
2018 *
2019 * RETURN     : max_jpeg_size
2020 *==========================================================================*/
2021int QCamera3HardwareInterface::calcMaxJpegSize()
2022{
2023    int32_t max_jpeg_size = 0;
2024    int temp_width, temp_height;
2025    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2026        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2027        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2028        if (temp_width * temp_height > max_jpeg_size ) {
2029            max_jpeg_size = temp_width * temp_height;
2030        }
2031    }
2032    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2033    return max_jpeg_size;
2034}
2035
2036/*===========================================================================
2037 * FUNCTION   : initStaticMetadata
2038 *
2039 * DESCRIPTION: initialize the static metadata
2040 *
2041 * PARAMETERS :
2042 *   @cameraId  : camera Id
2043 *
2044 * RETURN     : int32_t type of status
2045 *              0  -- success
2046 *              non-zero failure code
2047 *==========================================================================*/
2048int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2049{
2050    int rc = 0;
2051    CameraMetadata staticInfo;
2052
2053    /* android.info: hardware level */
2054    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2055    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2056        &supportedHardwareLevel, 1);
2057
2058    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2059    /*HAL 3 only*/
2060    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2061                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2062
2063    /*hard coded for now but this should come from sensor*/
2064    float min_focus_distance;
2065    if(facingBack){
2066        min_focus_distance = 10;
2067    } else {
2068        min_focus_distance = 0;
2069    }
2070    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2071                    &min_focus_distance, 1);
2072
2073    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2074                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2075
2076    /*should be using focal lengths but sensor doesn't provide that info now*/
2077    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2078                      &gCamCapability[cameraId]->focal_length,
2079                      1);
2080
2081    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2082                      gCamCapability[cameraId]->apertures,
2083                      gCamCapability[cameraId]->apertures_count);
2084
2085    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2086                gCamCapability[cameraId]->filter_densities,
2087                gCamCapability[cameraId]->filter_densities_count);
2088
2089
2090    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2091                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2092                      gCamCapability[cameraId]->optical_stab_modes_count);
2093
2094    staticInfo.update(ANDROID_LENS_POSITION,
2095                      gCamCapability[cameraId]->lens_position,
2096                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2097
2098    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2099                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2100    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2101                      lens_shading_map_size,
2102                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2103
2104    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2105                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2106    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2107            geo_correction_map_size,
2108            sizeof(geo_correction_map_size)/sizeof(int32_t));
2109
2110    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2111                       gCamCapability[cameraId]->geo_correction_map,
2112                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2113
2114    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2115            gCamCapability[cameraId]->sensor_physical_size, 2);
2116
2117    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2118            gCamCapability[cameraId]->exposure_time_range, 2);
2119
2120    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2121            &gCamCapability[cameraId]->max_frame_duration, 1);
2122
2123
2124    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2125                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2126
2127    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2128                                               gCamCapability[cameraId]->pixel_array_size.height};
2129    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2130                      pixel_array_size, 2);
2131
2132    int32_t active_array_size[] = {0, 0,
2133                                                gCamCapability[cameraId]->active_array_size.width,
2134                                                gCamCapability[cameraId]->active_array_size.height};
2135    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2136                      active_array_size, 4);
2137
2138    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2139            &gCamCapability[cameraId]->white_level, 1);
2140
2141    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2142            gCamCapability[cameraId]->black_level_pattern, 4);
2143
2144    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2145                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2146
2147    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2148                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2149
2150    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2151                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2152
2153    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2154                      &gCamCapability[cameraId]->histogram_size, 1);
2155
2156    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2157            &gCamCapability[cameraId]->max_histogram_count, 1);
2158
2159    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2160                                                gCamCapability[cameraId]->sharpness_map_size.height};
2161
2162    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2163            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2164
2165    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2166            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2167
2168
2169    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2170                      &gCamCapability[cameraId]->raw_min_duration,
2171                       1);
2172
2173    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2174                                                HAL_PIXEL_FORMAT_BLOB};
2175    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2176    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2177                      scalar_formats,
2178                      scalar_formats_count);
2179
2180    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2181    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2182              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2183              available_processed_sizes);
2184    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2185                available_processed_sizes,
2186                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2187
2188    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2189                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2190                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2191
2192    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2193    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2194                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2195                 available_fps_ranges);
2196    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2197            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2198
2199    camera_metadata_rational exposureCompensationStep = {
2200            gCamCapability[cameraId]->exp_compensation_step.numerator,
2201            gCamCapability[cameraId]->exp_compensation_step.denominator};
2202    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2203                      &exposureCompensationStep, 1);
2204
2205    /*TO DO*/
2206    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2207    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2208                      availableVstabModes, sizeof(availableVstabModes));
2209
2210    /*HAL 1 and HAL 3 common*/
2211    float maxZoom = 4;
2212    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2213            &maxZoom, 1);
2214
2215    int32_t max3aRegions = 1;
2216    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2217            &max3aRegions, 1);
2218
2219    uint8_t availableFaceDetectModes[] = {
2220            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2221            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2222    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2223                      availableFaceDetectModes,
2224                      sizeof(availableFaceDetectModes));
2225
2226    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2227                                       gCamCapability[cameraId]->raw_dim.height};
2228    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2229                      raw_size,
2230                      sizeof(raw_size)/sizeof(uint32_t));
2231
2232    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2233                                                        gCamCapability[cameraId]->exposure_compensation_max};
2234    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2235            exposureCompensationRange,
2236            sizeof(exposureCompensationRange)/sizeof(int32_t));
2237
2238    uint8_t lensFacing = (facingBack) ?
2239            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2240    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2241
2242    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2243                available_processed_sizes,
2244                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2245
2246    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2247                      available_thumbnail_sizes,
2248                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2249
2250    int32_t max_jpeg_size = 0;
2251    int temp_width, temp_height;
2252    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2253        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2254        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2255        if (temp_width * temp_height > max_jpeg_size ) {
2256            max_jpeg_size = temp_width * temp_height;
2257        }
2258    }
2259    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2260    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2261                      &max_jpeg_size, 1);
2262
2263    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2264    int32_t size = 0;
2265    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2266        int val = lookupFwkName(EFFECT_MODES_MAP,
2267                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2268                                   gCamCapability[cameraId]->supported_effects[i]);
2269        if (val != NAME_NOT_FOUND) {
2270            avail_effects[size] = (uint8_t)val;
2271            size++;
2272        }
2273    }
2274    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2275                      avail_effects,
2276                      size);
2277
2278    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2279    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2280    int32_t supported_scene_modes_cnt = 0;
2281    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2282        int val = lookupFwkName(SCENE_MODES_MAP,
2283                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2284                                gCamCapability[cameraId]->supported_scene_modes[i]);
2285        if (val != NAME_NOT_FOUND) {
2286            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2287            supported_indexes[supported_scene_modes_cnt] = i;
2288            supported_scene_modes_cnt++;
2289        }
2290    }
2291
2292    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2293                      avail_scene_modes,
2294                      supported_scene_modes_cnt);
2295
2296    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2297    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2298                      supported_scene_modes_cnt,
2299                      scene_mode_overrides,
2300                      supported_indexes,
2301                      cameraId);
2302    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2303                      scene_mode_overrides,
2304                      supported_scene_modes_cnt*3);
2305
2306    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2307    size = 0;
2308    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2309        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2310                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2311                                 gCamCapability[cameraId]->supported_antibandings[i]);
2312        if (val != NAME_NOT_FOUND) {
2313            avail_antibanding_modes[size] = (uint8_t)val;
2314            size++;
2315        }
2316
2317    }
2318    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2319                      avail_antibanding_modes,
2320                      size);
2321
2322    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2323    size = 0;
2324    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2325        int val = lookupFwkName(FOCUS_MODES_MAP,
2326                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2327                                gCamCapability[cameraId]->supported_focus_modes[i]);
2328        if (val != NAME_NOT_FOUND) {
2329            avail_af_modes[size] = (uint8_t)val;
2330            size++;
2331        }
2332    }
2333    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2334                      avail_af_modes,
2335                      size);
2336
2337    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2338    size = 0;
2339    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2340        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2341                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2342                                    gCamCapability[cameraId]->supported_white_balances[i]);
2343        if (val != NAME_NOT_FOUND) {
2344            avail_awb_modes[size] = (uint8_t)val;
2345            size++;
2346        }
2347    }
2348    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2349                      avail_awb_modes,
2350                      size);
2351
2352    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2353    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2354      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2355
2356    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2357            available_flash_levels,
2358            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2359
2360
2361    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2362    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2363            &flashAvailable, 1);
2364
2365    uint8_t avail_ae_modes[5];
2366    size = 0;
2367    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2368        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2369        size++;
2370    }
2371    if (flashAvailable) {
2372        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2373        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2374        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2375    }
2376    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2377                      avail_ae_modes,
2378                      size);
2379
2380    int32_t sensitivity_range[2];
2381    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2382    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2383    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2384                      sensitivity_range,
2385                      sizeof(sensitivity_range) / sizeof(int32_t));
2386
2387    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2388                      &gCamCapability[cameraId]->max_analog_sensitivity,
2389                      1);
2390
2391    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2392                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2393                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2394
2395    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2396    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2397                      &sensor_orientation,
2398                      1);
2399
2400    int32_t max_output_streams[3] = {1, 3, 1};
2401    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2402                      max_output_streams,
2403                      3);
2404
2405    gStaticMetadata[cameraId] = staticInfo.release();
2406    return rc;
2407}
2408
2409/*===========================================================================
2410 * FUNCTION   : makeTable
2411 *
2412 * DESCRIPTION: make a table of sizes
2413 *
2414 * PARAMETERS :
2415 *
2416 *
2417 *==========================================================================*/
2418void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2419                                          int32_t* sizeTable)
2420{
2421    int j = 0;
2422    for (int i = 0; i < size; i++) {
2423        sizeTable[j] = dimTable[i].width;
2424        sizeTable[j+1] = dimTable[i].height;
2425        j+=2;
2426    }
2427}
2428
2429/*===========================================================================
2430 * FUNCTION   : makeFPSTable
2431 *
2432 * DESCRIPTION: make a table of fps ranges
2433 *
2434 * PARAMETERS :
2435 *
2436 *==========================================================================*/
2437void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2438                                          int32_t* fpsRangesTable)
2439{
2440    int j = 0;
2441    for (int i = 0; i < size; i++) {
2442        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2443        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2444        j+=2;
2445    }
2446}
2447
2448/*===========================================================================
2449 * FUNCTION   : makeOverridesList
2450 *
2451 * DESCRIPTION: make a list of scene mode overrides
2452 *
2453 * PARAMETERS :
2454 *
2455 *
2456 *==========================================================================*/
2457void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2458                                                  uint8_t size, uint8_t* overridesList,
2459                                                  uint8_t* supported_indexes,
2460                                                  int camera_id)
2461{
2462    /*daemon will give a list of overrides for all scene modes.
2463      However we should send the fwk only the overrides for the scene modes
2464      supported by the framework*/
2465    int j = 0, index = 0, supt = 0;
2466    uint8_t focus_override;
2467    for (int i = 0; i < size; i++) {
2468        supt = 0;
2469        index = supported_indexes[i];
2470        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2471        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2472                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2473                                                    overridesTable[index].awb_mode);
2474        focus_override = (uint8_t)overridesTable[index].af_mode;
2475        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2476           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2477              supt = 1;
2478              break;
2479           }
2480        }
2481        if (supt) {
2482           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2483                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2484                                              focus_override);
2485        } else {
2486           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2487        }
2488        j+=3;
2489    }
2490}
2491
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the backend pixel format to a format recognized by
 *              the framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
 *
 *==========================================================================*/
2502int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2503{
2504    int32_t halPixelFormat;
2505
2506    switch (format) {
2507    case CAM_FORMAT_YUV_420_NV12:
2508        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2509        break;
2510    case CAM_FORMAT_YUV_420_NV21:
2511        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2512        break;
2513    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2514        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2515        break;
2516    case CAM_FORMAT_YUV_420_YV12:
2517        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2518        break;
2519    case CAM_FORMAT_YUV_422_NV16:
2520    case CAM_FORMAT_YUV_422_NV61:
2521    default:
2522        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2523        break;
2524    }
2525    return halPixelFormat;
2526}
2527
/*===========================================================================
 * FUNCTION   : getSensorSensitivity
 *
 * DESCRIPTION: convert iso_mode to an integer value
 *
 * PARAMETERS : iso_mode : the iso_mode supported by sensor
 *
 * RETURN     : sensitivity supported by sensor
 *
 *==========================================================================*/
2538int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2539{
2540    int32_t sensitivity;
2541
2542    switch (iso_mode) {
2543    case CAM_ISO_MODE_100:
2544        sensitivity = 100;
2545        break;
2546    case CAM_ISO_MODE_200:
2547        sensitivity = 200;
2548        break;
2549    case CAM_ISO_MODE_400:
2550        sensitivity = 400;
2551        break;
2552    case CAM_ISO_MODE_800:
2553        sensitivity = 800;
2554        break;
2555    case CAM_ISO_MODE_1600:
2556        sensitivity = 1600;
2557        break;
2558    default:
2559        sensitivity = -1;
2560        break;
2561    }
2562    return sensitivity;
2563}
2564
2565
2566/*===========================================================================
2567 * FUNCTION   : AddSetParmEntryToBatch
2568 *
2569 * DESCRIPTION: add set parameter entry into batch
2570 *
2571 * PARAMETERS :
2572 *   @p_table     : ptr to parameter buffer
2573 *   @paramType   : parameter type
2574 *   @paramLength : length of parameter value
2575 *   @paramValue  : ptr to parameter value
2576 *
2577 * RETURN     : int32_t type of status
2578 *              NO_ERROR  -- success
2579 *              none-zero failure code
2580 *==========================================================================*/
2581int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2582                                                          cam_intf_parm_type_t paramType,
2583                                                          uint32_t paramLength,
2584                                                          void *paramValue)
2585{
2586    int position = paramType;
2587    int current, next;
2588
2589    /*************************************************************************
2590    *                 Code to take care of linking next flags                *
2591    *************************************************************************/
2592    current = GET_FIRST_PARAM_ID(p_table);
2593    if (position == current){
2594        //DO NOTHING
2595    } else if (position < current){
2596        SET_NEXT_PARAM_ID(position, p_table, current);
2597        SET_FIRST_PARAM_ID(p_table, position);
2598    } else {
2599        /* Search for the position in the linked list where we need to slot in*/
2600        while (position > GET_NEXT_PARAM_ID(current, p_table))
2601            current = GET_NEXT_PARAM_ID(current, p_table);
2602
2603        /*If node already exists no need to alter linking*/
2604        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2605            next = GET_NEXT_PARAM_ID(current, p_table);
2606            SET_NEXT_PARAM_ID(current, p_table, position);
2607            SET_NEXT_PARAM_ID(position, p_table, next);
2608        }
2609    }
2610
2611    /*************************************************************************
2612    *                   Copy contents into entry                             *
2613    *************************************************************************/
2614
2615    if (paramLength > sizeof(parm_type_t)) {
2616        ALOGE("%s:Size of input larger than max entry size",__func__);
2617        return BAD_VALUE;
2618    }
2619    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2620    return NO_ERROR;
2621}
2622
/*===========================================================================
 * FUNCTION   : lookupFwkName
 *
 * DESCRIPTION: in case the enum is not the same in the framework and the
 *              backend, make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : length of the map
 *   @hal_name : name of the hal_parm to map
 *
 * RETURN     : int8_t type of status
 *              fwk_name       -- success
 *              NAME_NOT_FOUND -- no matching framework entry
 *==========================================================================*/
2638int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2639                                             int len, int hal_name)
2640{
2641
2642    for (int i = 0; i < len; i++) {
2643        if (arr[i].hal_name == hal_name)
2644            return arr[i].fwk_name;
2645    }
2646
2647    /* Not able to find matching framework type is not necessarily
2648     * an error case. This happens when mm-camera supports more attributes
2649     * than the frameworks do */
2650    ALOGD("%s: Cannot find matching framework type", __func__);
2651    return NAME_NOT_FOUND;
2652}
2653
/*===========================================================================
 * FUNCTION   : lookupHalName
 *
 * DESCRIPTION: in case the enum is not the same in the framework and the
 *              backend, make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : length of the map
 *   @fwk_name : name of the fwk_parm to map
 *
 * RETURN     : int8_t type of status
 *              hal_name       -- success
 *              NAME_NOT_FOUND -- no matching HAL entry
 *==========================================================================*/
2669int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2670                                             int len, int fwk_name)
2671{
2672    for (int i = 0; i < len; i++) {
2673       if (arr[i].fwk_name == fwk_name)
2674           return arr[i].hal_name;
2675    }
2676    ALOGE("%s: Cannot find matching hal type", __func__);
2677    return NAME_NOT_FOUND;
2678}
2679
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
2693int QCamera3HardwareInterface::getCamInfo(int cameraId,
2694                                    struct camera_info *info)
2695{
2696    int rc = 0;
2697
2698    if (NULL == gCamCapability[cameraId]) {
2699        rc = initCapabilities(cameraId);
2700        if (rc < 0) {
2701            //pthread_mutex_unlock(&g_camlock);
2702            return rc;
2703        }
2704    }
2705
2706    if (NULL == gStaticMetadata[cameraId]) {
2707        rc = initStaticMetadata(cameraId);
2708        if (rc < 0) {
2709            return rc;
2710        }
2711    }
2712
2713    switch(gCamCapability[cameraId]->position) {
2714    case CAM_POSITION_BACK:
2715        info->facing = CAMERA_FACING_BACK;
2716        break;
2717
2718    case CAM_POSITION_FRONT:
2719        info->facing = CAMERA_FACING_FRONT;
2720        break;
2721
2722    default:
2723        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2724        rc = -1;
2725        break;
2726    }
2727
2728
2729    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2730    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2731    info->static_camera_characteristics = gStaticMetadata[cameraId];
2732
2733    return rc;
2734}
2735
/*===========================================================================
 * FUNCTION   : translateCapabilityToMetadata
 *
 * DESCRIPTION: translate the camera capability into a default
 *              camera_metadata_t request template
 *
 * PARAMETERS : type of the request template
 *
 * RETURN     : success: camera_metadata_t*
 *              failure: NULL
 *
 *==========================================================================*/
2748camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2749{
2750    pthread_mutex_lock(&mMutex);
2751
2752    if (mDefaultMetadata[type] != NULL) {
2753        pthread_mutex_unlock(&mMutex);
2754        return mDefaultMetadata[type];
2755    }
2756    //first time we are handling this request
2757    //fill up the metadata structure using the wrapper class
2758    CameraMetadata settings;
2759    //translate from cam_capability_t to camera_metadata_tag_t
2760    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2761    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2762
2763    /*control*/
2764
2765    uint8_t controlIntent = 0;
2766    switch (type) {
2767      case CAMERA3_TEMPLATE_PREVIEW:
2768        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2769        break;
2770      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2771        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2772        break;
2773      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2774        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2775        break;
2776      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2777        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2778        break;
2779      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2780        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2781        break;
2782      default:
2783        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2784        break;
2785    }
2786    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2787
2788    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2789            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2790
2791    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2792    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2793
2794    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2795    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2796
2797    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2798    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2799
2800    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2801    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2802
2803    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2804    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2805
2806    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2807    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2808
2809    static uint8_t focusMode;
2810    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2811        ALOGE("%s: Setting focus mode to auto", __func__);
2812        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2813    } else {
2814        ALOGE("%s: Setting focus mode to off", __func__);
2815        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2816    }
2817    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2818
2819    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2820    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2821
2822    /*flash*/
2823    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2824    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2825
2826    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2827    settings.update(ANDROID_FLASH_FIRING_POWER,
2828            &flashFiringLevel, 1);
2829
2830    /* lens */
2831    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2832    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2833
2834    if (gCamCapability[mCameraId]->filter_densities_count) {
2835        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2836        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2837                        gCamCapability[mCameraId]->filter_densities_count);
2838    }
2839
2840    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2841    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2842
2843    /* frame duration */
2844    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
2845    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2846
2847    /* sensitivity */
2848    static const int32_t default_sensitivity = 100;
2849    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2850
2851    /*edge mode*/
2852    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
2853    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
2854
2855    /*noise reduction mode*/
2856    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
2857    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
2858
2859    /*color correction mode*/
2860    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
2861    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
2862
2863    /*transform matrix mode*/
2864    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
2865    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
2866
2867    mDefaultMetadata[type] = settings.release();
2868
2869    pthread_mutex_unlock(&mMutex);
2870    return mDefaultMetadata[type];
2871}
2872
2873/*===========================================================================
2874 * FUNCTION   : setFrameParameters
2875 *
2876 * DESCRIPTION: set parameters per frame as requested in the metadata from
2877 *              framework
2878 *
2879 * PARAMETERS :
2880 *   @request   : request that needs to be serviced
2881 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2882 *
2883 * RETURN     : success: NO_ERROR
2884 *              failure:
2885 *==========================================================================*/
2886int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2887                    uint32_t streamTypeMask)
2888{
2889    /*translate from camera_metadata_t type to parm_type_t*/
2890    int rc = 0;
2891    if (request->settings == NULL && mFirstRequest) {
2892        /*settings cannot be null for the first request*/
2893        return BAD_VALUE;
2894    }
2895
2896    int32_t hal_version = CAM_HAL_V3;
2897
2898    memset(mParameters, 0, sizeof(parm_buffer_t));
2899    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2900    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2901                sizeof(hal_version), &hal_version);
2902    if (rc < 0) {
2903        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2904        return BAD_VALUE;
2905    }
2906
2907    /*we need to update the frame number in the parameters*/
2908    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2909                                sizeof(request->frame_number), &(request->frame_number));
2910    if (rc < 0) {
2911        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2912        return BAD_VALUE;
2913    }
2914
2915    /* Update stream id mask where buffers are requested */
2916    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2917                                sizeof(streamTypeMask), &streamTypeMask);
2918    if (rc < 0) {
2919        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2920        return BAD_VALUE;
2921    }
2922
2923    if(request->settings != NULL){
2924        rc = translateMetadataToParameters(request);
2925    }
2926    /*set the parameters to backend*/
2927    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2928    return rc;
2929}
2930
/*===========================================================================
 * FUNCTION   : translateMetadataToParameters
 *
 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
 *              Walks the framework capture settings attached to @request and,
 *              for every recognized android.* tag, batches the corresponding
 *              HAL value into mParameters via AddSetParmEntryToBatch so the
 *              whole set can later be applied to the camera backend in one
 *              set_parms call.
 *
 * PARAMETERS :
 *   @request  : request sent from framework
 *
 * RETURN     : success: NO_ERROR
 *              failure: error code of the LAST AddSetParmEntryToBatch call
 *
 * NOTE(review): rc is overwritten by every AddSetParmEntryToBatch call, so a
 *               failure in an early section is masked by any later success —
 *               confirm this "last writer wins" behavior is intentional.
 *==========================================================================*/
int QCamera3HardwareInterface::translateMetadataToParameters
                                  (const camera3_capture_request_t *request)
{
    int rc = 0;
    // Wrap the raw camera_metadata_t for keyed exists()/find() access.
    CameraMetadata frame_settings;
    frame_settings = request->settings;

    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
        int32_t antibandingMode =
            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
                sizeof(antibandingMode), &antibandingMode);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
        int32_t expCompensation = frame_settings.find(
            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
        // Clamp the framework value to the sensor's advertised EV range.
        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
          sizeof(expCompensation), &expCompensation);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
                sizeof(aeLock), &aeLock);
    }
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        // Framework supplies [min, max] as two consecutive i32 entries.
        cam_fps_range_t fps_range;
        fps_range.min_fps =
            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
        fps_range.max_fps =
            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
                sizeof(fps_range), &fps_range);
    }

    // -1.0 is a sentinel meaning "focus distance not supplied"; it is also
    // consulted by the AF-mode translation below.
    float focalDistance = -1.0;
    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_LENS_FOCUS_DISTANCE,
                sizeof(focalDistance), &focalDistance);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
        uint8_t fwk_focusMode =
            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
        uint8_t focusMode;
        // AF OFF with an explicit focus distance of 0 means "focus at
        // infinity" per the camera3 contract.
        // NOTE(review): exact float == comparison against 0.0 — confirm the
        // framework always sends a literal 0.0f for the infinity case.
        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
            focusMode = CAM_FOCUS_MODE_INFINITY;
        } else{
         // NOTE(review): this passes sizeof(FOCUS_MODES_MAP) (a byte count)
         // as the table length, while the SCENE_MODES_MAP call below divides
         // by the element size — confirm which convention lookupHalName
         // actually expects; one of the two call styles is likely wrong.
         focusMode = lookupHalName(FOCUS_MODES_MAP,
                                   sizeof(FOCUS_MODES_MAP),
                                   fwk_focusMode);
        }
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
                sizeof(focusMode), &focusMode);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock =
            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
                sizeof(awbLock), &awbLock);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
        uint8_t fwk_whiteLevel =
            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
                sizeof(WHITE_BALANCE_MODES_MAP),
                fwk_whiteLevel);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
                sizeof(whiteLevel), &whiteLevel);
    }

    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode =
            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
                sizeof(EFFECT_MODES_MAP),
                fwk_effectMode);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
                sizeof(effectMode), &effectMode);
    }

    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
        // A single framework AE mode fans out into three HAL parameters:
        // AEC on/off, LED/flash mode, and red-eye reduction.
        uint8_t fwk_aeMode =
            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
        uint8_t aeMode;
        int32_t redeye;

        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
            aeMode = CAM_AE_MODE_OFF;
        } else {
            aeMode = CAM_AE_MODE_ON;
        }
        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
            redeye = 1;
        } else {
            redeye = 0;
        }

        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
                                          sizeof(AE_FLASH_MODE_MAP),
                                          fwk_aeMode);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
                sizeof(aeMode), &aeMode);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
                sizeof(flashMode), &flashMode);
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
                sizeof(redeye), &redeye);
    }

    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode =
            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        rc =
            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
                    sizeof(colorCorrectMode), &colorCorrectMode);
    }

    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        // Four gains in R, G_even, G_odd, B order per the metadata contract
        // — TODO confirm the HAL struct expects the same ordering.
        cam_color_correct_gains_t colorCorrectGains;
        for (int i = 0; i < 4; i++) {
            colorCorrectGains.gains[i] =
                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        rc =
            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
                    sizeof(colorCorrectGains), &colorCorrectGains);
    }

    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        // Copy the 3x3 rational transform, flattened row-major in the
        // framework metadata, into the HAL matrix element by element.
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        int num = 0;
        for (int i = 0; i < 3; i++) {
           for (int j = 0; j < 3; j++) {
              transform_elem.numerator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
              transform_elem.denominator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
              num++;
           }
        }
        rc =
            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                    sizeof(colorCorrectTransform), &colorCorrectTransform);
    }

    // Unlike the other tags, the AEC precapture trigger is ALWAYS sent to the
    // backend: IDLE/-1 when the framework did not request a precapture.
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
    }
    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                                sizeof(aecTrigger), &aecTrigger);

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
    }

    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
                sizeof(metaMode), &metaMode);
        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
           // NOTE(review): reads ANDROID_CONTROL_SCENE_MODE without an
           // exists() guard — relies on the framework always pairing it with
           // USE_SCENE_MODE; confirm find() is safe on a missing tag here.
           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                             fwk_sceneMode);
           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                sizeof(sceneMode), &sceneMode);
        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                sizeof(sceneMode), &sceneMode);
        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                sizeof(sceneMode), &sceneMode);
        }
    }

    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        // NOTE(review): a u8 metadata entry widened into an int32_t and sent
        // with sizeof(int32_t) — confirm the backend expects a 4-byte value.
        int32_t demosaic =
            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
                sizeof(demosaic), &demosaic);
    }

    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        // Edge mode carries an implicit sharpness level: 0 when off,
        // otherwise a fixed default of 10.
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            edge_application.sharpness = 10;
        }
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
                sizeof(edge_application), &edge_application);
    }

    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
        int32_t edgeStrength =
            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
    }

    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        // android.flash.mode is honored only when AE is not already driving
        // the flash (any AE mode above plain ON controls the LED itself).
        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
                respectFlashMode = 0;
                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
                    __func__);
            }
        }
        if (respectFlashMode) {
            uint8_t flashMode =
                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
            // NOTE(review): the (int32_t) cast result is narrowed back into
            // a uint8_t local but sent with sizeof(uint8_t) — confirm the
            // LED_MODE parm size (the AE section above sends 4 bytes).
            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
                                          sizeof(FLASH_MODES_MAP),
                                          flashMode);
            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
            // To check: CAM_INTF_META_FLASH_MODE usage
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
                          sizeof(flashMode), &flashMode);
        }
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower =
            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
                sizeof(flashPower), &flashPower);
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime =
            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
    }

    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
        uint8_t geometricMode =
            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
                sizeof(geometricMode), &geometricMode);
    }

    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
        uint8_t geometricStrength =
            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_GEOMETRIC_STRENGTH,
                sizeof(geometricStrength), &geometricStrength);
    }

    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode =
            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
                sizeof(hotPixelMode), &hotPixelMode);
    }

    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture =
            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
                sizeof(lensAperture), &lensAperture);
    }

    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity =
            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
                sizeof(filterDensity), &filterDensity);
    }

    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength =
            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_LENS_FOCAL_LENGTH,
                sizeof(focalLength), &focalLength);
    }

    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_LENS_OPT_STAB_MODE,
                sizeof(optStabMode), &optStabMode);
    }

    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode =
            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_NOISE_REDUCTION_MODE,
                sizeof(noiseRedMode), &noiseRedMode);
    }

    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
        uint8_t noiseRedStrength =
            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
                sizeof(noiseRedStrength), &noiseRedStrength);
    }

    // The crop region is remembered (scalerCropSet) because the ROI sections
    // at the bottom validate their regions against it.
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SCALER_CROP_REGION,
                sizeof(scalerCropRegion), &scalerCropRegion);
        scalerCropSet = true;
    }

    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sizeof(sensorExpTime), &sensorExpTime);
    }

    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        // Clamp the requested duration into [minFrameDuration for the
        // request's streams, sensor max_frame_duration].
        int64_t sensorFrameDuration =
            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        int64_t minFrameDuration = getMinFrameDuration(request);
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_FRAME_DURATION,
                sizeof(sensorFrameDuration), &sensorFrameDuration);
    }

    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        // Clamp ISO into the sensor's advertised sensitivity range.
        int32_t sensorSensitivity =
            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity <
                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
            sensorSensitivity =
                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity >
                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
            sensorSensitivity =
                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_SENSITIVITY,
                sizeof(sensorSensitivity), &sensorSensitivity);
    }

    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        // NOTE(review): u8 metadata widened into int32_t and sent as 4 bytes
        // — confirm the backend's expected size for SHADING_MODE.
        int32_t shadingMode =
            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
                sizeof(shadingMode), &shadingMode);
    }

    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
        uint8_t shadingStrength =
            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
                sizeof(shadingStrength), &shadingStrength);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
        uint8_t facedetectMode =
            lookupHalName(FACEDETECT_MODES_MAP,
                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_FACEDETECT_MODE,
                sizeof(facedetectMode), &facedetectMode);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_HISTOGRAM_MODE,
                sizeof(histogramMode), &histogramMode);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sizeof(sharpnessMapMode), &sharpnessMapMode);
    }

    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_TONEMAP_MODE,
                sizeof(tonemapMode), &tonemapMode);
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        // Each point is an (in, out) float pair, hence count/2 points.
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;

        /* ch0 = G*/
        int point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
            for (int j = 0; j < 2; j++) {
               tonemapCurveGreen.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (int j = 0; j < 2; j++) {
               tonemapCurveBlue.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (int j = 0; j < 2; j++) {
               tonemapCurveRed.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_TONEMAP_CURVES,
                sizeof(tonemapCurves), &tonemapCurves);
    }

    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent =
            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
                sizeof(captureIntent), &captureIntent);
    }

    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock =
            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
                sizeof(blackLevelLock), &blackLevelLock);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                sizeof(lensShadingMapMode), &lensShadingMapMode);
    }

    // ROI handling (AE/AF/AWB): convert the framework region, and when a
    // crop region was set above, reset the ROI if it falls outside the crop
    // (resetIfNeededROI returning false skips sending the ROI).
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
                    sizeof(roi), &roi);
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
                    sizeof(roi), &roi);
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
                    sizeof(roi), &roi);
        }
    }
    return rc;
}
3493
3494/*===========================================================================
3495 * FUNCTION   : getJpegSettings
3496 *
3497 * DESCRIPTION: save the jpeg settings in the HAL
3498 *
3499 *
3500 * PARAMETERS :
3501 *   @settings  : frame settings information from framework
3502 *
3503 *
3504 * RETURN     : success: NO_ERROR
3505 *              failure:
3506 *==========================================================================*/
3507int QCamera3HardwareInterface::getJpegSettings
3508                                  (const camera_metadata_t *settings)
3509{
3510    if (mJpegSettings) {
3511        if (mJpegSettings->gps_timestamp) {
3512            free(mJpegSettings->gps_timestamp);
3513            mJpegSettings->gps_timestamp = NULL;
3514        }
3515        if (mJpegSettings->gps_coordinates) {
3516            for (int i = 0; i < 3; i++) {
3517                free(mJpegSettings->gps_coordinates[i]);
3518                mJpegSettings->gps_coordinates[i] = NULL;
3519            }
3520        }
3521        free(mJpegSettings);
3522        mJpegSettings = NULL;
3523    }
3524    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3525    CameraMetadata jpeg_settings;
3526    jpeg_settings = settings;
3527
3528    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3529        mJpegSettings->jpeg_orientation =
3530            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3531    } else {
3532        mJpegSettings->jpeg_orientation = 0;
3533    }
3534    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3535        mJpegSettings->jpeg_quality =
3536            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3537    } else {
3538        mJpegSettings->jpeg_quality = 85;
3539    }
3540    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3541        mJpegSettings->thumbnail_size.width =
3542            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3543        mJpegSettings->thumbnail_size.height =
3544            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3545    } else {
3546        mJpegSettings->thumbnail_size.width = 0;
3547        mJpegSettings->thumbnail_size.height = 0;
3548    }
3549    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3550        for (int i = 0; i < 3; i++) {
3551            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3552            *(mJpegSettings->gps_coordinates[i]) =
3553                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3554        }
3555    } else{
3556       for (int i = 0; i < 3; i++) {
3557            mJpegSettings->gps_coordinates[i] = NULL;
3558        }
3559    }
3560
3561    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3562        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3563        *(mJpegSettings->gps_timestamp) =
3564            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3565    } else {
3566        mJpegSettings->gps_timestamp = NULL;
3567    }
3568
3569    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3570        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3571        for (int i = 0; i < len; i++) {
3572            mJpegSettings->gps_processing_method[i] =
3573                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3574        }
3575        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3576            mJpegSettings->gps_processing_method[len] = '\0';
3577        }
3578    } else {
3579        mJpegSettings->gps_processing_method[0] = '\0';
3580    }
3581
3582    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3583        mJpegSettings->sensor_sensitivity =
3584            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3585    } else {
3586        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3587    }
3588
3589    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3590
3591    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3592        mJpegSettings->lens_focal_length =
3593            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3594    }
3595    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3596        mJpegSettings->exposure_compensation =
3597            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3598    }
3599    mJpegSettings->sharpness = 10; //default value
3600    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3601        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3602        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3603            mJpegSettings->sharpness = 0;
3604        }
3605    }
3606    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3607    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3608    mJpegSettings->is_jpeg_format = true;
3609    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3610    return 0;
3611}
3612
3613/*===========================================================================
3614 * FUNCTION   : captureResultCb
3615 *
3616 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3617 *
3618 * PARAMETERS :
3619 *   @frame  : frame information from mm-camera-interface
3620 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3621 *   @userdata: userdata
3622 *
3623 * RETURN     : NONE
3624 *==========================================================================*/
3625void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3626                camera3_stream_buffer_t *buffer,
3627                uint32_t frame_number, void *userdata)
3628{
3629    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3630    if (hw == NULL) {
3631        ALOGE("%s: Invalid hw %p", __func__, hw);
3632        return;
3633    }
3634
3635    hw->captureResultCb(metadata, buffer, frame_number);
3636    return;
3637}
3638
3639
3640/*===========================================================================
3641 * FUNCTION   : initialize
3642 *
3643 * DESCRIPTION: Pass framework callback pointers to HAL
3644 *
3645 * PARAMETERS :
3646 *
3647 *
3648 * RETURN     : Success : 0
3649 *              Failure: -ENODEV
3650 *==========================================================================*/
3651
3652int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3653                                  const camera3_callback_ops_t *callback_ops)
3654{
3655    ALOGV("%s: E", __func__);
3656    QCamera3HardwareInterface *hw =
3657        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3658    if (!hw) {
3659        ALOGE("%s: NULL camera device", __func__);
3660        return -ENODEV;
3661    }
3662
3663    int rc = hw->initialize(callback_ops);
3664    ALOGV("%s: X", __func__);
3665    return rc;
3666}
3667
3668/*===========================================================================
3669 * FUNCTION   : configure_streams
3670 *
3671 * DESCRIPTION:
3672 *
3673 * PARAMETERS :
3674 *
3675 *
3676 * RETURN     : Success: 0
3677 *              Failure: -EINVAL (if stream configuration is invalid)
3678 *                       -ENODEV (fatal error)
3679 *==========================================================================*/
3680
3681int QCamera3HardwareInterface::configure_streams(
3682        const struct camera3_device *device,
3683        camera3_stream_configuration_t *stream_list)
3684{
3685    ALOGV("%s: E", __func__);
3686    QCamera3HardwareInterface *hw =
3687        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3688    if (!hw) {
3689        ALOGE("%s: NULL camera device", __func__);
3690        return -ENODEV;
3691    }
3692    int rc = hw->configureStreams(stream_list);
3693    ALOGV("%s: X", __func__);
3694    return rc;
3695}
3696
3697/*===========================================================================
3698 * FUNCTION   : register_stream_buffers
3699 *
3700 * DESCRIPTION: Register stream buffers with the device
3701 *
3702 * PARAMETERS :
3703 *
3704 * RETURN     :
3705 *==========================================================================*/
3706int QCamera3HardwareInterface::register_stream_buffers(
3707        const struct camera3_device *device,
3708        const camera3_stream_buffer_set_t *buffer_set)
3709{
3710    ALOGV("%s: E", __func__);
3711    QCamera3HardwareInterface *hw =
3712        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3713    if (!hw) {
3714        ALOGE("%s: NULL camera device", __func__);
3715        return -ENODEV;
3716    }
3717    int rc = hw->registerStreamBuffers(buffer_set);
3718    ALOGV("%s: X", __func__);
3719    return rc;
3720}
3721
3722/*===========================================================================
3723 * FUNCTION   : construct_default_request_settings
3724 *
3725 * DESCRIPTION: Configure a settings buffer to meet the required use case
3726 *
3727 * PARAMETERS :
3728 *
3729 *
3730 * RETURN     : Success: Return valid metadata
3731 *              Failure: Return NULL
3732 *==========================================================================*/
3733const camera_metadata_t* QCamera3HardwareInterface::
3734    construct_default_request_settings(const struct camera3_device *device,
3735                                        int type)
3736{
3737
3738    ALOGV("%s: E", __func__);
3739    camera_metadata_t* fwk_metadata = NULL;
3740    QCamera3HardwareInterface *hw =
3741        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3742    if (!hw) {
3743        ALOGE("%s: NULL camera device", __func__);
3744        return NULL;
3745    }
3746
3747    fwk_metadata = hw->translateCapabilityToMetadata(type);
3748
3749    ALOGV("%s: X", __func__);
3750    return fwk_metadata;
3751}
3752
3753/*===========================================================================
3754 * FUNCTION   : process_capture_request
3755 *
3756 * DESCRIPTION:
3757 *
3758 * PARAMETERS :
3759 *
3760 *
3761 * RETURN     :
3762 *==========================================================================*/
3763int QCamera3HardwareInterface::process_capture_request(
3764                    const struct camera3_device *device,
3765                    camera3_capture_request_t *request)
3766{
3767    ALOGV("%s: E", __func__);
3768    QCamera3HardwareInterface *hw =
3769        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3770    if (!hw) {
3771        ALOGE("%s: NULL camera device", __func__);
3772        return -EINVAL;
3773    }
3774
3775    int rc = hw->processCaptureRequest(request);
3776    ALOGV("%s: X", __func__);
3777    return rc;
3778}
3779
3780/*===========================================================================
3781 * FUNCTION   : get_metadata_vendor_tag_ops
3782 *
3783 * DESCRIPTION:
3784 *
3785 * PARAMETERS :
3786 *
3787 *
3788 * RETURN     :
3789 *==========================================================================*/
3790
3791void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3792                const struct camera3_device *device,
3793                vendor_tag_query_ops_t* ops)
3794{
3795    ALOGV("%s: E", __func__);
3796    QCamera3HardwareInterface *hw =
3797        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3798    if (!hw) {
3799        ALOGE("%s: NULL camera device", __func__);
3800        return;
3801    }
3802
3803    hw->getMetadataVendorTagOps(ops);
3804    ALOGV("%s: X", __func__);
3805    return;
3806}
3807
3808/*===========================================================================
3809 * FUNCTION   : dump
3810 *
3811 * DESCRIPTION:
3812 *
3813 * PARAMETERS :
3814 *
3815 *
3816 * RETURN     :
3817 *==========================================================================*/
3818
3819void QCamera3HardwareInterface::dump(
3820                const struct camera3_device *device, int fd)
3821{
3822    ALOGV("%s: E", __func__);
3823    QCamera3HardwareInterface *hw =
3824        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3825    if (!hw) {
3826        ALOGE("%s: NULL camera device", __func__);
3827        return;
3828    }
3829
3830    hw->dump(fd);
3831    ALOGV("%s: X", __func__);
3832    return;
3833}
3834
3835/*===========================================================================
3836 * FUNCTION   : flush
3837 *
3838 * DESCRIPTION:
3839 *
3840 * PARAMETERS :
3841 *
3842 *
3843 * RETURN     :
3844 *==========================================================================*/
3845
3846int QCamera3HardwareInterface::flush(
3847                const struct camera3_device *device)
3848{
3849    int rc;
3850    ALOGV("%s: E", __func__);
3851    QCamera3HardwareInterface *hw =
3852        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3853    if (!hw) {
3854        ALOGE("%s: NULL camera device", __func__);
3855        return -EINVAL;
3856    }
3857
3858    rc = hw->flush();
3859    ALOGV("%s: X", __func__);
3860    return rc;
3861}
3862
3863/*===========================================================================
3864 * FUNCTION   : close_camera_device
3865 *
3866 * DESCRIPTION:
3867 *
3868 * PARAMETERS :
3869 *
3870 *
3871 * RETURN     :
3872 *==========================================================================*/
3873int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3874{
3875    ALOGV("%s: E", __func__);
3876    int ret = NO_ERROR;
3877    QCamera3HardwareInterface *hw =
3878        reinterpret_cast<QCamera3HardwareInterface *>(
3879            reinterpret_cast<camera3_device_t *>(device)->priv);
3880    if (!hw) {
3881        ALOGE("NULL camera device");
3882        return BAD_VALUE;
3883    }
3884    delete hw;
3885
3886    pthread_mutex_lock(&mCameraSessionLock);
3887    mCameraSessionActive = 0;
3888    pthread_mutex_unlock(&mCameraSessionLock);
3889    ALOGV("%s: X", __func__);
3890    return ret;
3891}
3892
3893/*===========================================================================
3894 * FUNCTION   : getWaveletDenoiseProcessPlate
3895 *
3896 * DESCRIPTION: query wavelet denoise process plate
3897 *
3898 * PARAMETERS : None
3899 *
3900 * RETURN     : WNR prcocess plate vlaue
3901 *==========================================================================*/
3902cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3903{
3904    char prop[PROPERTY_VALUE_MAX];
3905    memset(prop, 0, sizeof(prop));
3906    property_get("persist.denoise.process.plates", prop, "0");
3907    int processPlate = atoi(prop);
3908    switch(processPlate) {
3909    case 0:
3910        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3911    case 1:
3912        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3913    case 2:
3914        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3915    case 3:
3916        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3917    default:
3918        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3919    }
3920}
3921
3922/*===========================================================================
3923 * FUNCTION   : needRotationReprocess
3924 *
3925 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3926 *
3927 * PARAMETERS : none
3928 *
3929 * RETURN     : true: needed
3930 *              false: no need
3931 *==========================================================================*/
3932bool QCamera3HardwareInterface::needRotationReprocess()
3933{
3934
3935    if (!mJpegSettings->is_jpeg_format) {
3936        // RAW image, no need to reprocess
3937        return false;
3938    }
3939
3940    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3941        mJpegSettings->jpeg_orientation > 0) {
3942        // current rotation is not zero, and pp has the capability to process rotation
3943        ALOGD("%s: need do reprocess for rotation", __func__);
3944        return true;
3945    }
3946
3947    return false;
3948}
3949
3950/*===========================================================================
3951 * FUNCTION   : needReprocess
3952 *
3953 * DESCRIPTION: if reprocess in needed
3954 *
3955 * PARAMETERS : none
3956 *
3957 * RETURN     : true: needed
3958 *              false: no need
3959 *==========================================================================*/
3960bool QCamera3HardwareInterface::needReprocess()
3961{
3962    if (!mJpegSettings->is_jpeg_format) {
3963        // RAW image, no need to reprocess
3964        return false;
3965    }
3966
3967    if ((mJpegSettings->min_required_pp_mask > 0) ||
3968         isWNREnabled()) {
3969        // TODO: add for ZSL HDR later
3970        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3971        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3972        return true;
3973    }
3974    return needRotationReprocess();
3975}
3976
3977/*===========================================================================
3978 * FUNCTION   : addOnlineReprocChannel
3979 *
3980 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
3981 *              coming from input channel
3982 *
3983 * PARAMETERS :
3984 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3985 *
3986 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3987 *==========================================================================*/
3988QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3989              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3990{
3991    int32_t rc = NO_ERROR;
3992    QCamera3ReprocessChannel *pChannel = NULL;
3993    if (pInputChannel == NULL) {
3994        ALOGE("%s: input channel obj is NULL", __func__);
3995        return NULL;
3996    }
3997
3998    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3999            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4000    if (NULL == pChannel) {
4001        ALOGE("%s: no mem for reprocess channel", __func__);
4002        return NULL;
4003    }
4004
4005    // Capture channel, only need snapshot and postview streams start together
4006    mm_camera_channel_attr_t attr;
4007    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4008    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4009    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4010    rc = pChannel->initialize();
4011    if (rc != NO_ERROR) {
4012        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4013        delete pChannel;
4014        return NULL;
4015    }
4016
4017    // pp feature config
4018    cam_pp_feature_config_t pp_config;
4019    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4020    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4021        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4022        pp_config.sharpness = mJpegSettings->sharpness;
4023    }
4024
4025    if (isWNREnabled()) {
4026        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4027        pp_config.denoise2d.denoise_enable = 1;
4028        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4029    }
4030    if (needRotationReprocess()) {
4031        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4032        int rotation = mJpegSettings->jpeg_orientation;
4033        if (rotation == 0) {
4034            pp_config.rotation = ROTATE_0;
4035        } else if (rotation == 90) {
4036            pp_config.rotation = ROTATE_90;
4037        } else if (rotation == 180) {
4038            pp_config.rotation = ROTATE_180;
4039        } else if (rotation == 270) {
4040            pp_config.rotation = ROTATE_270;
4041        }
4042    }
4043
4044   rc = pChannel->addReprocStreamsFromSource(pp_config,
4045                                             pInputChannel,
4046                                             mMetadataChannel);
4047
4048    if (rc != NO_ERROR) {
4049        delete pChannel;
4050        return NULL;
4051    }
4052    return pChannel;
4053}
4054
/*===========================================================================
 * FUNCTION   : getMaxUnmatchedFramesInQueue
 *
 * DESCRIPTION: max number of unmatched frames allowed to queue up, taken
 *              from the camera capability's minimum post-proc buffer count
 *
 * PARAMETERS : none
 *
 * RETURN     : max unmatched frame count
 *==========================================================================*/
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4059
/*===========================================================================
 * FUNCTION   : isWNREnabled
 *
 * DESCRIPTION: query whether wavelet noise reduction is supported, as
 *              reported by the camera capability table
 *
 * PARAMETERS : none
 *
 * RETURN     : true if WNR is supported, false otherwise
 *==========================================================================*/
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4063
4064}; //end namespace qcamera
4065