QCamera3HWI.cpp revision 14dfc272241ba78c85a327da2872e71b3208f8c5
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Convenience accessor for the raw data pointer at INDEX inside a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables, indexed by camera id.
// NOTE(review): dereferenced unchecked in the constructor below, so these are
// assumed populated before any QCamera3HardwareInterface is created -- confirm
// against the HAL module's static initialization path.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: openCamera() rejects a second simultaneous
// camera session while one is active.
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Translation tables between Android framework metadata enum values
// (ANDROID_*) and the mm-camera backend enum values (CAM_*).

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Note: STEADYPHOTO maps to the backend's ANTISHAKE scene mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Note: AF_MODE_OFF maps to the backend's FIXED focus mode (no AF actuation).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Maps the framework AE mode to a backend flash mode: plain AE_MODE_ON (and
// OFF) keep the flash off; AUTO_FLASH and AUTO_FLASH_REDEYE both select AUTO.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// SIMPLE face-detect mode is not mapped; only OFF and FULL are supported here.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Flat (width, height) pairs advertised as supported JPEG thumbnail sizes;
// the trailing (0, 0) pair is the "no thumbnail" option.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
// camera3_device_ops vtable handed to the camera framework (GCC designated-
// initializer syntax). Each entry is a static trampoline; they presumably
// recover the instance through camera3_device_t::priv, which the constructor
// sets to `this` -- the trampolines themselves are defined elsewhere in this
// file.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the camera3_device_t handed back to the framework; priv carries
    // `this` so the static ops trampolines can recover the instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced without a NULL
    // check -- it must already be populated when this constructor runs;
    // confirm against the HAL module's initialization order.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // No default request templates exist yet; slots are filled on demand.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Best effort: power-hint module is optional, failure is only logged.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
231        it != mStreamInfo.end(); it++) {
232        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
233        if (channel)
234           channel->stop();
235    }
236    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
237        it != mStreamInfo.end(); it++) {
238        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
239        if (channel)
240            delete channel;
241        free (*it);
242    }
243
244    mPictureChannel = NULL;
245
246    if (mJpegSettings != NULL) {
247        free(mJpegSettings);
248        mJpegSettings = NULL;
249    }
250
251    /* Clean up all channels */
252    if (mCameraInitialized) {
253        mMetadataChannel->stop();
254        delete mMetadataChannel;
255        mMetadataChannel = NULL;
256        deinitParameters();
257    }
258
259    if (mCameraOpened)
260        closeCamera();
261
262    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
263        if (mDefaultMetadata[i])
264            free_camera_metadata(mDefaultMetadata[i]);
265
266    pthread_cond_destroy(&mRequestCond);
267
268    pthread_mutex_destroy(&mMutex);
269    ALOGV("%s: X", __func__);
270}
271
272/*===========================================================================
273 * FUNCTION   : openCamera
274 *
275 * DESCRIPTION: open camera
276 *
277 * PARAMETERS :
278 *   @hw_device  : double ptr for camera device struct
279 *
280 * RETURN     : int32_t type of status
281 *              NO_ERROR  -- success
282 *              none-zero failure code
283 *==========================================================================*/
284int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
285{
286    int rc = 0;
287    pthread_mutex_lock(&mCameraSessionLock);
288    if (mCameraSessionActive) {
289        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
290        pthread_mutex_unlock(&mCameraSessionLock);
291        return INVALID_OPERATION;
292    }
293
294    if (mCameraOpened) {
295        *hw_device = NULL;
296        return PERMISSION_DENIED;
297    }
298
299    rc = openCamera();
300    if (rc == 0) {
301        *hw_device = &mCameraDevice.common;
302        mCameraSessionActive = 1;
303    } else
304        *hw_device = NULL;
305
306#ifdef HAS_MULTIMEDIA_HINTS
307    if (rc == 0) {
308        if (m_pPowerModule) {
309            if (m_pPowerModule->powerHint) {
310                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
311                        (void *)"state=1");
312            }
313        }
314    }
315#endif
316    pthread_mutex_unlock(&mCameraSessionLock);
317    return rc;
318}
319
320/*===========================================================================
321 * FUNCTION   : openCamera
322 *
323 * DESCRIPTION: open camera
324 *
325 * PARAMETERS : none
326 *
327 * RETURN     : int32_t type of status
328 *              NO_ERROR  -- success
329 *              none-zero failure code
330 *==========================================================================*/
331int QCamera3HardwareInterface::openCamera()
332{
333    if (mCameraHandle) {
334        ALOGE("Failure: Camera already opened");
335        return ALREADY_EXISTS;
336    }
337    mCameraHandle = camera_open(mCameraId);
338    if (!mCameraHandle) {
339        ALOGE("camera_open failed.");
340        return UNKNOWN_ERROR;
341    }
342
343    mCameraOpened = true;
344
345    return NO_ERROR;
346}
347
348/*===========================================================================
349 * FUNCTION   : closeCamera
350 *
351 * DESCRIPTION: close camera
352 *
353 * PARAMETERS : none
354 *
355 * RETURN     : int32_t type of status
356 *              NO_ERROR  -- success
357 *              none-zero failure code
358 *==========================================================================*/
359int QCamera3HardwareInterface::closeCamera()
360{
361    int rc = NO_ERROR;
362
363    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
364    mCameraHandle = NULL;
365    mCameraOpened = false;
366
367#ifdef HAS_MULTIMEDIA_HINTS
368    if (rc == NO_ERROR) {
369        if (m_pPowerModule) {
370            if (m_pPowerModule->powerHint) {
371                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
372                        (void *)"state=0");
373            }
374        }
375    }
376#endif
377
378    return rc;
379}
380
381/*===========================================================================
382 * FUNCTION   : initialize
383 *
384 * DESCRIPTION: Initialize frameworks callback functions
385 *
386 * PARAMETERS :
387 *   @callback_ops : callback function to frameworks
388 *
389 * RETURN     :
390 *
391 *==========================================================================*/
// Initializes the HAL session: sets up the parameter buffers and the
// metadata channel, then stores the framework's callback table.
// Cleanup on failure unwinds in reverse order via the goto chain below.
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
       goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    // NOTE(review): this NULL check is only meaningful if the build uses
    // -fno-exceptions / a nothrow allocator; plain `new` throws instead of
    // returning NULL.
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    return 0;

// Error unwinding: each label undoes the step that succeeded before the
// failure point, ending with the mutex release.
err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
434
435/*===========================================================================
436 * FUNCTION   : configureStreams
437 *
438 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
439 *              and output streams.
440 *
441 * PARAMETERS :
442 *   @stream_list : streams to be configured
443 *
444 * RETURN     :
445 *
446 *==========================================================================*/
447int QCamera3HardwareInterface::configureStreams(
448        camera3_stream_configuration_t *streamList)
449{
450    int rc = 0;
451    mIsZslMode = false;
452    pthread_mutex_lock(&mMutex);
453    // Sanity check stream_list
454    if (streamList == NULL) {
455        ALOGE("%s: NULL stream configuration", __func__);
456        pthread_mutex_unlock(&mMutex);
457        return BAD_VALUE;
458    }
459
460    if (streamList->streams == NULL) {
461        ALOGE("%s: NULL stream list", __func__);
462        pthread_mutex_unlock(&mMutex);
463        return BAD_VALUE;
464    }
465
466    if (streamList->num_streams < 1) {
467        ALOGE("%s: Bad number of streams requested: %d", __func__,
468                streamList->num_streams);
469        pthread_mutex_unlock(&mMutex);
470        return BAD_VALUE;
471    }
472
473    camera3_stream_t *inputStream = NULL;
474    camera3_stream_t *jpegStream = NULL;
475    /* first invalidate all the steams in the mStreamList
476     * if they appear again, they will be validated */
477    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
478            it != mStreamInfo.end(); it++) {
479        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
480        channel->stop();
481        (*it)->status = INVALID;
482    }
483
484    for (size_t i = 0; i < streamList->num_streams; i++) {
485        camera3_stream_t *newStream = streamList->streams[i];
486        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
487                __func__, newStream->stream_type, newStream->format,
488                 newStream->width, newStream->height);
489        //if the stream is in the mStreamList validate it
490        bool stream_exists = false;
491        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
492                it != mStreamInfo.end(); it++) {
493            if ((*it)->stream == newStream) {
494                QCamera3Channel *channel =
495                    (QCamera3Channel*)(*it)->stream->priv;
496                stream_exists = true;
497                (*it)->status = RECONFIGURE;
498                /*delete the channel object associated with the stream because
499                  we need to reconfigure*/
500                delete channel;
501                (*it)->stream->priv = NULL;
502            }
503        }
504        if (!stream_exists) {
505            //new stream
506            stream_info_t* stream_info;
507            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
508            stream_info->stream = newStream;
509            stream_info->status = VALID;
510            stream_info->registered = 0;
511            mStreamInfo.push_back(stream_info);
512        }
513        if (newStream->stream_type == CAMERA3_STREAM_INPUT
514                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
515            if (inputStream != NULL) {
516                ALOGE("%s: Multiple input streams requested!", __func__);
517                pthread_mutex_unlock(&mMutex);
518                return BAD_VALUE;
519            }
520            inputStream = newStream;
521        }
522        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
523            jpegStream = newStream;
524        }
525    }
526    mInputStream = inputStream;
527
528    /*clean up invalid streams*/
529    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
530            it != mStreamInfo.end();) {
531        if(((*it)->status) == INVALID){
532            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
533            delete channel;
534            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
535            free(*it);
536            it = mStreamInfo.erase(it);
537        } else {
538            it++;
539        }
540    }
541
542    //mMetadataChannel->stop();
543
544    /* Allocate channel objects for the requested streams */
545    for (size_t i = 0; i < streamList->num_streams; i++) {
546        camera3_stream_t *newStream = streamList->streams[i];
547        if (newStream->priv == NULL) {
548            //New stream, construct channel
549            switch (newStream->stream_type) {
550            case CAMERA3_STREAM_INPUT:
551                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
552                break;
553            case CAMERA3_STREAM_BIDIRECTIONAL:
554                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
555                    GRALLOC_USAGE_HW_CAMERA_WRITE;
556                break;
557            case CAMERA3_STREAM_OUTPUT:
558                /* For video encoding stream, set read/write rarely
559                 * flag so that they may be set to un-cached */
560                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
561                    newStream->usage =
562                         (GRALLOC_USAGE_SW_READ_RARELY |
563                         GRALLOC_USAGE_SW_WRITE_RARELY |
564                         GRALLOC_USAGE_HW_CAMERA_WRITE);
565                else
566                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
567                break;
568            default:
569                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
570                break;
571            }
572
573            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
574                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
575                QCamera3Channel *channel;
576                switch (newStream->format) {
577                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
578                case HAL_PIXEL_FORMAT_YCbCr_420_888:
579                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
580                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
581                        jpegStream) {
582                        uint32_t width = jpegStream->width;
583                        uint32_t height = jpegStream->height;
584                        mIsZslMode = true;
585                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
586                            mCameraHandle->ops, captureResultCb,
587                            &gCamCapability[mCameraId]->padding_info, this, newStream,
588                            width, height);
589                    } else
590                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
591                            mCameraHandle->ops, captureResultCb,
592                            &gCamCapability[mCameraId]->padding_info, this, newStream);
593                    if (channel == NULL) {
594                        ALOGE("%s: allocation of channel failed", __func__);
595                        pthread_mutex_unlock(&mMutex);
596                        return -ENOMEM;
597                    }
598
599                    newStream->priv = channel;
600                    break;
601                case HAL_PIXEL_FORMAT_BLOB:
602                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
603                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
604                            mCameraHandle->ops, captureResultCb,
605                            &gCamCapability[mCameraId]->padding_info, this, newStream);
606                    if (mPictureChannel == NULL) {
607                        ALOGE("%s: allocation of channel failed", __func__);
608                        pthread_mutex_unlock(&mMutex);
609                        return -ENOMEM;
610                    }
611                    newStream->priv = (QCamera3Channel*)mPictureChannel;
612                    break;
613
614                //TODO: Add support for app consumed format?
615                default:
616                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
617                    break;
618                }
619            }
620        } else {
621            // Channel already exists for this stream
622            // Do nothing for now
623        }
624    }
625    /*For the streams to be reconfigured we need to register the buffers
626      since the framework wont*/
627    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
628            it != mStreamInfo.end(); it++) {
629        if ((*it)->status == RECONFIGURE) {
630            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
631            /*only register buffers for streams that have already been
632              registered*/
633            if ((*it)->registered) {
634                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
635                        (*it)->buffer_set.buffers);
636                if (rc != NO_ERROR) {
637                    ALOGE("%s: Failed to register the buffers of old stream,\
638                            rc = %d", __func__, rc);
639                }
640                ALOGV("%s: channel %p has %d buffers",
641                        __func__, channel, (*it)->buffer_set.num_buffers);
642            }
643        }
644
645        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
646        if (index == NAME_NOT_FOUND) {
647            mPendingBuffersMap.add((*it)->stream, 0);
648        } else {
649            mPendingBuffersMap.editValueAt(index) = 0;
650        }
651    }
652
653    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
654    mPendingRequestsList.clear();
655
656    /*flush the metadata list*/
657    if (!mStoredMetadataList.empty()) {
658        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
659              m != mStoredMetadataList.end(); m++) {
660            mMetadataChannel->bufDone(m->meta_buf);
661            free(m->meta_buf);
662            m = mStoredMetadataList.erase(m);
663        }
664    }
665
666    //settings/parameters don't carry over for new configureStreams
667    memset(mParameters, 0, sizeof(parm_buffer_t));
668    mFirstRequest = true;
669
670    //Get min frame duration for this streams configuration
671    deriveMinFrameDuration();
672
673    pthread_mutex_unlock(&mMutex);
674    return rc;
675}
676
677/*===========================================================================
678 * FUNCTION   : validateCaptureRequest
679 *
680 * DESCRIPTION: validate a capture request from camera service
681 *
682 * PARAMETERS :
683 *   @request : request from framework to process
684 *
685 * RETURN     :
686 *
687 *==========================================================================*/
688int QCamera3HardwareInterface::validateCaptureRequest(
689                    camera3_capture_request_t *request)
690{
691    ssize_t idx = 0;
692    const camera3_stream_buffer_t *b;
693    CameraMetadata meta;
694
695    /* Sanity check the request */
696    if (request == NULL) {
697        ALOGE("%s: NULL capture request", __func__);
698        return BAD_VALUE;
699    }
700
701    uint32_t frameNumber = request->frame_number;
702    if (request->input_buffer != NULL &&
703            request->input_buffer->stream != mInputStream) {
704        ALOGE("%s: Request %d: Input buffer not from input stream!",
705                __FUNCTION__, frameNumber);
706        return BAD_VALUE;
707    }
708    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
709        ALOGE("%s: Request %d: No output buffers provided!",
710                __FUNCTION__, frameNumber);
711        return BAD_VALUE;
712    }
713    if (request->input_buffer != NULL) {
714        b = request->input_buffer;
715        QCamera3Channel *channel =
716            static_cast<QCamera3Channel*>(b->stream->priv);
717        if (channel == NULL) {
718            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
719                    __func__, frameNumber, idx);
720            return BAD_VALUE;
721        }
722        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
723            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
724                    __func__, frameNumber, idx);
725            return BAD_VALUE;
726        }
727        if (b->release_fence != -1) {
728            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
729                    __func__, frameNumber, idx);
730            return BAD_VALUE;
731        }
732        if (b->buffer == NULL) {
733            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
734                    __func__, frameNumber, idx);
735            return BAD_VALUE;
736        }
737    }
738
739    // Validate all buffers
740    b = request->output_buffers;
741    do {
742        QCamera3Channel *channel =
743                static_cast<QCamera3Channel*>(b->stream->priv);
744        if (channel == NULL) {
745            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
746                    __func__, frameNumber, idx);
747            return BAD_VALUE;
748        }
749        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
750            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
751                    __func__, frameNumber, idx);
752            return BAD_VALUE;
753        }
754        if (b->release_fence != -1) {
755            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
756                    __func__, frameNumber, idx);
757            return BAD_VALUE;
758        }
759        if (b->buffer == NULL) {
760            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
761                    __func__, frameNumber, idx);
762            return BAD_VALUE;
763        }
764        idx++;
765        b = request->output_buffers + idx;
766    } while (idx < (ssize_t)request->num_output_buffers);
767
768    return NO_ERROR;
769}
770
771/*===========================================================================
772 * FUNCTION   : deriveMinFrameDuration
773 *
774 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
775 *              on currently configured streams.
776 *
777 * PARAMETERS : NONE
778 *
779 * RETURN     : NONE
780 *
781 *==========================================================================*/
782void QCamera3HardwareInterface::deriveMinFrameDuration()
783{
784    int32_t maxJpegDimension, maxProcessedDimension;
785
786    maxJpegDimension = 0;
787    maxProcessedDimension = 0;
788
789    // Figure out maximum jpeg, processed, and raw dimensions
790    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
791        it != mStreamInfo.end(); it++) {
792
793        // Input stream doesn't have valid stream_type
794        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
795            continue;
796
797        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
798        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
799            if (dimension > maxJpegDimension)
800                maxJpegDimension = dimension;
801        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
802            if (dimension > maxProcessedDimension)
803                maxProcessedDimension = dimension;
804        }
805    }
806
807    //Assume all jpeg dimensions are in processed dimensions.
808    if (maxJpegDimension > maxProcessedDimension)
809        maxProcessedDimension = maxJpegDimension;
810
811    //Find minimum durations for processed, jpeg, and raw
812    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
813    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
814        if (maxProcessedDimension ==
815            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
816            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
817            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
818            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
819            break;
820        }
821    }
822}
823
824/*===========================================================================
825 * FUNCTION   : getMinFrameDuration
826 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
833 *
834 *==========================================================================*/
835int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
836{
837    bool hasJpegStream = false;
838    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
839        const camera3_stream_t *stream = request->output_buffers[i].stream;
840        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
841            hasJpegStream = true;
842    }
843
844    if (!hasJpegStream)
845        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
846    else
847        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
848}
849
850/*===========================================================================
851 * FUNCTION   : registerStreamBuffers
852 *
853 * DESCRIPTION: Register buffers for a given stream with the HAL device.
854 *
 * PARAMETERS :
 *   @buffer_set : set of buffers to be registered for a single stream
857 *
858 * RETURN     :
859 *
860 *==========================================================================*/
861int QCamera3HardwareInterface::registerStreamBuffers(
862        const camera3_stream_buffer_set_t *buffer_set)
863{
864    int rc = 0;
865
866    pthread_mutex_lock(&mMutex);
867
868    if (buffer_set == NULL) {
869        ALOGE("%s: Invalid buffer_set parameter.", __func__);
870        pthread_mutex_unlock(&mMutex);
871        return -EINVAL;
872    }
873    if (buffer_set->stream == NULL) {
874        ALOGE("%s: Invalid stream parameter.", __func__);
875        pthread_mutex_unlock(&mMutex);
876        return -EINVAL;
877    }
878    if (buffer_set->num_buffers < 1) {
879        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
880        pthread_mutex_unlock(&mMutex);
881        return -EINVAL;
882    }
883    if (buffer_set->buffers == NULL) {
884        ALOGE("%s: Invalid buffers parameter.", __func__);
885        pthread_mutex_unlock(&mMutex);
886        return -EINVAL;
887    }
888
889    camera3_stream_t *stream = buffer_set->stream;
890    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
891
892    //set the buffer_set in the mStreamInfo array
893    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
894            it != mStreamInfo.end(); it++) {
895        if ((*it)->stream == stream) {
896            uint32_t numBuffers = buffer_set->num_buffers;
897            (*it)->buffer_set.stream = buffer_set->stream;
898            (*it)->buffer_set.num_buffers = numBuffers;
899            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
900            if ((*it)->buffer_set.buffers == NULL) {
901                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
902                pthread_mutex_unlock(&mMutex);
903                return -ENOMEM;
904            }
905            for (size_t j = 0; j < numBuffers; j++){
906                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
907            }
908            (*it)->registered = 1;
909        }
910    }
911    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
912    if (rc < 0) {
913        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
914        pthread_mutex_unlock(&mMutex);
915        return -ENODEV;
916    }
917
918    pthread_mutex_unlock(&mMutex);
919    return NO_ERROR;
920}
921
922/*===========================================================================
923 * FUNCTION   : processCaptureRequest
924 *
925 * DESCRIPTION: process a capture request from camera service
926 *
927 * PARAMETERS :
928 *   @request : request from framework to process
929 *
930 * RETURN     :
931 *
932 *==========================================================================*/
933int QCamera3HardwareInterface::processCaptureRequest(
934                    camera3_capture_request_t *request)
935{
936    int rc = NO_ERROR;
937    int32_t request_id;
938    CameraMetadata meta;
939    MetadataBufferInfo reproc_meta;
940    int queueMetadata = 0;
941
942    pthread_mutex_lock(&mMutex);
943
944    rc = validateCaptureRequest(request);
945    if (rc != NO_ERROR) {
946        ALOGE("%s: incoming request is not valid", __func__);
947        pthread_mutex_unlock(&mMutex);
948        return rc;
949    }
950
951    meta = request->settings;
952
953    // For first capture request, send capture intent, and
954    // stream on all streams
955    if (mFirstRequest) {
956
957        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
958            int32_t hal_version = CAM_HAL_V3;
959            uint8_t captureIntent =
960                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
961
962            memset(mParameters, 0, sizeof(parm_buffer_t));
963            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
964            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
965                sizeof(hal_version), &hal_version);
966            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
967                sizeof(captureIntent), &captureIntent);
968            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
969                mParameters);
970        }
971
972        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
973            it != mStreamInfo.end(); it++) {
974            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
975            channel->start();
976        }
977    }
978
979    uint32_t frameNumber = request->frame_number;
980    uint32_t streamTypeMask = 0;
981
982    if (meta.exists(ANDROID_REQUEST_ID)) {
983        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
984        mCurrentRequestId = request_id;
985        ALOGV("%s: Received request with id: %d",__func__, request_id);
986    } else if (mFirstRequest || mCurrentRequestId == -1){
987        ALOGE("%s: Unable to find request id field, \
988                & no previous id available", __func__);
989        return NAME_NOT_FOUND;
990    } else {
991        ALOGV("%s: Re-using old request id", __func__);
992        request_id = mCurrentRequestId;
993    }
994
995    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
996                                    __func__, __LINE__,
997                                    request->num_output_buffers,
998                                    request->input_buffer,
999                                    frameNumber);
1000    // Acquire all request buffers first
1001    int blob_request = 0;
1002    for (size_t i = 0; i < request->num_output_buffers; i++) {
1003        const camera3_stream_buffer_t& output = request->output_buffers[i];
1004        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1005        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1006
1007        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1008        //Call function to store local copy of jpeg data for encode params.
1009            blob_request = 1;
1010            rc = getJpegSettings(request->settings);
1011            if (rc < 0) {
1012                ALOGE("%s: failed to get jpeg parameters", __func__);
1013                pthread_mutex_unlock(&mMutex);
1014                return rc;
1015            }
1016        }
1017
1018        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1019        if (rc != OK) {
1020            ALOGE("%s: fence wait failed %d", __func__, rc);
1021            pthread_mutex_unlock(&mMutex);
1022            return rc;
1023        }
1024        streamTypeMask |= channel->getStreamTypeMask();
1025    }
1026
1027    rc = setFrameParameters(request, streamTypeMask);
1028    if (rc < 0) {
1029        ALOGE("%s: fail to set frame parameters", __func__);
1030        pthread_mutex_unlock(&mMutex);
1031        return rc;
1032    }
1033
1034    /* Update pending request list and pending buffers map */
1035    PendingRequestInfo pendingRequest;
1036    pendingRequest.frame_number = frameNumber;
1037    pendingRequest.num_buffers = request->num_output_buffers;
1038    pendingRequest.request_id = request_id;
1039    pendingRequest.blob_request = blob_request;
1040    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1041
1042    for (size_t i = 0; i < request->num_output_buffers; i++) {
1043        RequestedBufferInfo requestedBuf;
1044        requestedBuf.stream = request->output_buffers[i].stream;
1045        requestedBuf.buffer = NULL;
1046        pendingRequest.buffers.push_back(requestedBuf);
1047
1048        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1049    }
1050    mPendingRequestsList.push_back(pendingRequest);
1051
1052    // Notify metadata channel we receive a request
1053    mMetadataChannel->request(NULL, frameNumber);
1054
1055    // Call request on other streams
1056    for (size_t i = 0; i < request->num_output_buffers; i++) {
1057        const camera3_stream_buffer_t& output = request->output_buffers[i];
1058        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1059        mm_camera_buf_def_t *pInputBuffer = NULL;
1060
1061        if (channel == NULL) {
1062            ALOGE("%s: invalid channel pointer for stream", __func__);
1063            continue;
1064        }
1065
1066        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1067            QCamera3RegularChannel* inputChannel = NULL;
1068            if(request->input_buffer != NULL){
1069                //Try to get the internal format
1070                inputChannel = (QCamera3RegularChannel*)
1071                    request->input_buffer->stream->priv;
1072                if(inputChannel == NULL ){
1073                    ALOGE("%s: failed to get input channel handle", __func__);
1074                } else {
1075                    pInputBuffer =
1076                        inputChannel->getInternalFormatBuffer(
1077                                request->input_buffer->buffer);
1078                    ALOGD("%s: Input buffer dump",__func__);
1079                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1080                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1081                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1082                    ALOGD("Handle:%p", request->input_buffer->buffer);
1083                    //TODO: need to get corresponding metadata and send it to pproc
1084                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1085                         m != mStoredMetadataList.end(); m++) {
1086                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1087                            reproc_meta.meta_buf = m->meta_buf;
1088                            queueMetadata = 1;
1089                            break;
1090                        }
1091                    }
1092                }
1093            }
1094            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1095                            pInputBuffer,(QCamera3Channel*)inputChannel);
1096            if (queueMetadata) {
1097                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1098            }
1099        } else {
1100            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1101                __LINE__, output.buffer, frameNumber);
1102            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1103                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1104                     m != mStoredMetadataList.end(); m++) {
1105                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1106                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1107                            mMetadataChannel->bufDone(m->meta_buf);
1108                            free(m->meta_buf);
1109                            m = mStoredMetadataList.erase(m);
1110                            break;
1111                        }
1112                   }
1113                }
1114            }
1115            rc = channel->request(output.buffer, frameNumber);
1116        }
1117        if (rc < 0)
1118            ALOGE("%s: request failed", __func__);
1119    }
1120
1121    mFirstRequest = false;
1122
1123    //Block on conditional variable
1124    mPendingRequest = 1;
1125    while (mPendingRequest == 1) {
1126        pthread_cond_wait(&mRequestCond, &mMutex);
1127    }
1128
1129    pthread_mutex_unlock(&mMutex);
1130    return rc;
1131}
1132
1133/*===========================================================================
1134 * FUNCTION   : getMetadataVendorTagOps
1135 *
1136 * DESCRIPTION:
1137 *
1138 * PARAMETERS :
1139 *
1140 *
1141 * RETURN     :
1142 *==========================================================================*/
1143void QCamera3HardwareInterface::getMetadataVendorTagOps(
1144                    vendor_tag_query_ops_t* /*ops*/)
1145{
1146    /* Enable locks when we eventually add Vendor Tags */
1147    /*
1148    pthread_mutex_lock(&mMutex);
1149
1150    pthread_mutex_unlock(&mMutex);
1151    */
1152    return;
1153}
1154
1155/*===========================================================================
1156 * FUNCTION   : dump
1157 *
1158 * DESCRIPTION:
1159 *
1160 * PARAMETERS :
1161 *
1162 *
1163 * RETURN     :
1164 *==========================================================================*/
1165void QCamera3HardwareInterface::dump(int /*fd*/)
1166{
1167    /*Enable lock when we implement this function*/
1168    /*
1169    pthread_mutex_lock(&mMutex);
1170
1171    pthread_mutex_unlock(&mMutex);
1172    */
1173    return;
1174}
1175
1176/*===========================================================================
1177 * FUNCTION   : flush
1178 *
1179 * DESCRIPTION:
1180 *
1181 * PARAMETERS :
1182 *
1183 *
1184 * RETURN     :
1185 *==========================================================================*/
1186int QCamera3HardwareInterface::flush()
1187{
1188    /*Enable lock when we implement this function*/
1189    /*
1190    pthread_mutex_lock(&mMutex);
1191
1192    pthread_mutex_unlock(&mMutex);
1193    */
1194    return 0;
1195}
1196
1197/*===========================================================================
1198 * FUNCTION   : captureResultCb
1199 *
1200 * DESCRIPTION: Callback handler for all capture result
1201 *              (streams, as well as metadata)
1202 *
1203 * PARAMETERS :
1204 *   @metadata : metadata information
1205 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1206 *               NULL if metadata.
1207 *
1208 * RETURN     : NONE
1209 *==========================================================================*/
1210void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1211                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1212{
1213    pthread_mutex_lock(&mMutex);
1214
1215    if (metadata_buf) {
1216        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1217        int32_t frame_number_valid = *(int32_t *)
1218            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1219        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1220            CAM_INTF_META_PENDING_REQUESTS, metadata);
1221        uint32_t frame_number = *(uint32_t *)
1222            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1223        const struct timeval *tv = (const struct timeval *)
1224            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1225        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1226            tv->tv_usec * NSEC_PER_USEC;
1227
1228        if (!frame_number_valid) {
1229            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1230            mMetadataChannel->bufDone(metadata_buf);
1231            goto done_metadata;
1232        }
1233        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1234                frame_number, capture_time);
1235
1236        // Go through the pending requests info and send shutter/results to frameworks
1237        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1238                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1239            camera3_capture_result_t result;
1240            camera3_notify_msg_t notify_msg;
1241            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1242
1243            // Flush out all entries with less or equal frame numbers.
1244
1245            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1246            //Right now it's the same as metadata timestamp
1247
1248            //TODO: When there is metadata drop, how do we derive the timestamp of
1249            //dropped frames? For now, we fake the dropped timestamp by substracting
1250            //from the reported timestamp
1251            nsecs_t current_capture_time = capture_time -
1252                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1253
1254            // Send shutter notify to frameworks
1255            notify_msg.type = CAMERA3_MSG_SHUTTER;
1256            notify_msg.message.shutter.frame_number = i->frame_number;
1257            notify_msg.message.shutter.timestamp = current_capture_time;
1258            mCallbackOps->notify(mCallbackOps, &notify_msg);
1259            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1260                    i->frame_number, capture_time);
1261
1262            // Send empty metadata with already filled buffers for dropped metadata
1263            // and send valid metadata with already filled buffers for current metadata
1264            if (i->frame_number < frame_number) {
1265                CameraMetadata dummyMetadata;
1266                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1267                        &current_capture_time, 1);
1268                dummyMetadata.update(ANDROID_REQUEST_ID,
1269                        &(i->request_id), 1);
1270                result.result = dummyMetadata.release();
1271            } else {
1272                result.result = translateCbMetadataToResultMetadata(metadata,
1273                        current_capture_time, i->request_id);
1274                if (mIsZslMode) {
1275                   int found_metadata = 0;
1276                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1277                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1278                        j != i->buffers.end(); j++) {
1279                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1280                         //check if corresp. zsl already exists in the stored metadata list
1281                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1282                               m != mStoredMetadataList.begin(); m++) {
1283                            if (m->frame_number == frame_number) {
1284                               m->meta_buf = metadata_buf;
1285                               found_metadata = 1;
1286                               break;
1287                            }
1288                         }
1289                         if (!found_metadata) {
1290                            MetadataBufferInfo store_meta_info;
1291                            store_meta_info.meta_buf = metadata_buf;
1292                            store_meta_info.frame_number = frame_number;
1293                            mStoredMetadataList.push_back(store_meta_info);
1294                            found_metadata = 1;
1295                         }
1296                      }
1297                   }
1298                   if (!found_metadata) {
1299                       if (!i->input_buffer_present && i->blob_request) {
1300                          //livesnapshot or fallback non-zsl snapshot case
1301                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1302                                j != i->buffers.end(); j++){
1303                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1304                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1305                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1306                                 break;
1307                              }
1308                         }
1309                       } else {
1310                            //return the metadata immediately
1311                            mMetadataChannel->bufDone(metadata_buf);
1312                            free(metadata_buf);
1313                       }
1314                   }
1315               } else if (!mIsZslMode && i->blob_request) {
1316                   //If it is a blob request then send the metadata to the picture channel
1317                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1318               } else {
1319                   // Return metadata buffer
1320                   mMetadataChannel->bufDone(metadata_buf);
1321                   free(metadata_buf);
1322               }
1323
1324            }
1325            if (!result.result) {
1326                ALOGE("%s: metadata is NULL", __func__);
1327            }
1328            result.frame_number = i->frame_number;
1329            result.num_output_buffers = 0;
1330            result.output_buffers = NULL;
1331            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1332                    j != i->buffers.end(); j++) {
1333                if (j->buffer) {
1334                    result.num_output_buffers++;
1335                }
1336            }
1337
1338            if (result.num_output_buffers > 0) {
1339                camera3_stream_buffer_t *result_buffers =
1340                    new camera3_stream_buffer_t[result.num_output_buffers];
1341                if (!result_buffers) {
1342                    ALOGE("%s: Fatal error: out of memory", __func__);
1343                }
1344                size_t result_buffers_idx = 0;
1345                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1346                        j != i->buffers.end(); j++) {
1347                    if (j->buffer) {
1348                        result_buffers[result_buffers_idx++] = *(j->buffer);
1349                        free(j->buffer);
1350                        j->buffer = NULL;
1351                        mPendingBuffersMap.editValueFor(j->stream)--;
1352                    }
1353                }
1354                result.output_buffers = result_buffers;
1355
1356                mCallbackOps->process_capture_result(mCallbackOps, &result);
1357                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1358                        __func__, result.frame_number, current_capture_time);
1359                free_camera_metadata((camera_metadata_t *)result.result);
1360                delete[] result_buffers;
1361            } else {
1362                mCallbackOps->process_capture_result(mCallbackOps, &result);
1363                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1364                        __func__, result.frame_number, current_capture_time);
1365                free_camera_metadata((camera_metadata_t *)result.result);
1366            }
1367            // erase the element from the list
1368            i = mPendingRequestsList.erase(i);
1369        }
1370
1371
1372done_metadata:
1373        bool max_buffers_dequeued = false;
1374        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1375            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1376            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1377            if (queued_buffers == stream->max_buffers) {
1378                max_buffers_dequeued = true;
1379                break;
1380            }
1381        }
1382        if (!max_buffers_dequeued && !pending_requests) {
1383            // Unblock process_capture_request
1384            mPendingRequest = 0;
1385            pthread_cond_signal(&mRequestCond);
1386        }
1387    } else {
1388        // If the frame number doesn't exist in the pending request list,
1389        // directly send the buffer to the frameworks, and update pending buffers map
1390        // Otherwise, book-keep the buffer.
1391        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1392        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1393            i++;
1394        }
1395        if (i == mPendingRequestsList.end()) {
1396            // Verify all pending requests frame_numbers are greater
1397            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1398                    j != mPendingRequestsList.end(); j++) {
1399                if (j->frame_number < frame_number) {
1400                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1401                            __func__, j->frame_number, frame_number);
1402                }
1403            }
1404            camera3_capture_result_t result;
1405            result.result = NULL;
1406            result.frame_number = frame_number;
1407            result.num_output_buffers = 1;
1408            result.output_buffers = buffer;
1409            ALOGV("%s: result frame_number = %d, buffer = %p",
1410                    __func__, frame_number, buffer);
1411            mPendingBuffersMap.editValueFor(buffer->stream)--;
1412            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1413                int found = 0;
1414                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1415                      k != mStoredMetadataList.end(); k++) {
1416                    if (k->frame_number == frame_number) {
1417                        k->zsl_buf_hdl = buffer->buffer;
1418                        found = 1;
1419                        break;
1420                    }
1421                }
1422                if (!found) {
1423                   MetadataBufferInfo meta_info;
1424                   meta_info.frame_number = frame_number;
1425                   meta_info.zsl_buf_hdl = buffer->buffer;
1426                   mStoredMetadataList.push_back(meta_info);
1427                }
1428            }
1429            mCallbackOps->process_capture_result(mCallbackOps, &result);
1430        } else {
1431            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1432                    j != i->buffers.end(); j++) {
1433                if (j->stream == buffer->stream) {
1434                    if (j->buffer != NULL) {
1435                        ALOGE("%s: Error: buffer is already set", __func__);
1436                    } else {
1437                        j->buffer = (camera3_stream_buffer_t *)malloc(
1438                                sizeof(camera3_stream_buffer_t));
1439                        *(j->buffer) = *buffer;
1440                        ALOGV("%s: cache buffer %p at result frame_number %d",
1441                                __func__, buffer, frame_number);
1442                    }
1443                }
1444            }
1445        }
1446    }
1447    pthread_mutex_unlock(&mMutex);
1448    return;
1449}
1450
1451/*===========================================================================
1452 * FUNCTION   : translateCbMetadataToResultMetadata
1453 *
1454 * DESCRIPTION:
1455 *
1456 * PARAMETERS :
1457 *   @metadata : metadata information from callback
1458 *
1459 * RETURN     : camera_metadata_t*
1460 *              metadata in a format specified by fwk
1461 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    // Translate a backend (mm-camera) metadata buffer into a framework
    // camera_metadata_t result. The returned buffer is release()d from
    // camMetadata, so ownership passes to the caller.
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Timestamp and request id come from the callback, not the buffer.
    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    // NOTE(review): these are VLAs; when numFaces == 0 they have length 0
    // and are only consumed inside the numFaces > 0 guard below.
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        // weight == -1: face rectangles carry only 4 values, no weight slot
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    // region layout is [x_min, y_min, x_max, y_max, weight] (5 ints)
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);

    uint8_t *ae_state =
            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);

    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    // crop region goes out as [left, top, width, height]
    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    // also cached in mMetadataResponse for later consumers of this HWI
    mMetadataResponse.exposure_time = *sensorExpTime;
    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

    int64_t  *sensorFameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);

    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
    // also cached in mMetadataResponse for later consumers of this HWI
    mMetadataResponse.iso_speed = *sensorSensitivity;
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    // backend enum -> framework enum translation via the static map
    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
        *faceDetectMode);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
    // 4 floats (one per Bayer channel) per map cell
    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                       (float*)lensShadingMap->lens_shading,
                       4*map_width*map_height);

    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);

    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                       predColorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);

    uint8_t *blackLevelLock = (uint8_t*)
        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);

    uint8_t *sceneFlicker = (uint8_t*)
        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);


    // Detach the raw buffer; caller owns it from here on.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
1709
1710/*===========================================================================
1711 * FUNCTION   : convertToRegions
1712 *
1713 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1714 *
1715 * PARAMETERS :
1716 *   @rect   : cam_rect_t struct to convert
1717 *   @region : int32_t destination array
1718 *   @weight : if we are converting from cam_area_t, weight is valid
1719 *             else weight = -1
1720 *
1721 *==========================================================================*/
1722void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1723    region[0] = rect.left;
1724    region[1] = rect.top;
1725    region[2] = rect.left + rect.width;
1726    region[3] = rect.top + rect.height;
1727    if (weight > -1) {
1728        region[4] = weight;
1729    }
1730}
1731
1732/*===========================================================================
1733 * FUNCTION   : convertFromRegions
1734 *
1735 * DESCRIPTION: helper method to convert from array to cam_rect_t
1736 *
1737 * PARAMETERS :
1738 *   @rect   : cam_rect_t struct to convert
1739 *   @region : int32_t destination array
1740 *   @weight : if we are converting from cam_area_t, weight is valid
1741 *             else weight = -1
1742 *
1743 *==========================================================================*/
1744void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1745                                                   const camera_metadata_t *settings,
1746                                                   uint32_t tag){
1747    CameraMetadata frame_settings;
1748    frame_settings = settings;
1749    int32_t x_min = frame_settings.find(tag).data.i32[0];
1750    int32_t y_min = frame_settings.find(tag).data.i32[1];
1751    int32_t x_max = frame_settings.find(tag).data.i32[2];
1752    int32_t y_max = frame_settings.find(tag).data.i32[3];
1753    roi->weight = frame_settings.find(tag).data.i32[4];
1754    roi->rect.left = x_min;
1755    roi->rect.top = y_min;
1756    roi->rect.width = x_max - x_min;
1757    roi->rect.height = y_max - y_min;
1758}
1759
1760/*===========================================================================
1761 * FUNCTION   : resetIfNeededROI
1762 *
1763 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1764 *              crop region
1765 *
1766 * PARAMETERS :
1767 *   @roi       : cam_area_t struct to resize
1768 *   @scalerCropRegion : cam_crop_region_t region to compare against
1769 *
1770 *
1771 *==========================================================================*/
1772bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1773                                                 const cam_crop_region_t* scalerCropRegion)
1774{
1775    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1776    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1777    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1778    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1779    if ((roi_x_max < scalerCropRegion->left) ||
1780        (roi_y_max < scalerCropRegion->top)  ||
1781        (roi->rect.left > crop_x_max) ||
1782        (roi->rect.top > crop_y_max)){
1783        return false;
1784    }
1785    if (roi->rect.left < scalerCropRegion->left) {
1786        roi->rect.left = scalerCropRegion->left;
1787    }
1788    if (roi->rect.top < scalerCropRegion->top) {
1789        roi->rect.top = scalerCropRegion->top;
1790    }
1791    if (roi_x_max > crop_x_max) {
1792        roi_x_max = crop_x_max;
1793    }
1794    if (roi_y_max > crop_y_max) {
1795        roi_y_max = crop_y_max;
1796    }
1797    roi->rect.width = roi_x_max - roi->rect.left;
1798    roi->rect.height = roi_y_max - roi->rect.top;
1799    return true;
1800}
1801
1802/*===========================================================================
1803 * FUNCTION   : convertLandmarks
1804 *
1805 * DESCRIPTION: helper method to extract the landmarks from face detection info
1806 *
1807 * PARAMETERS :
1808 *   @face   : cam_rect_t struct to convert
1809 *   @landmarks : int32_t destination array
1810 *
1811 *
1812 *==========================================================================*/
1813void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1814{
1815    landmarks[0] = face.left_eye_center.x;
1816    landmarks[1] = face.left_eye_center.y;
1817    landmarks[2] = face.right_eye_center.y;
1818    landmarks[3] = face.right_eye_center.y;
1819    landmarks[4] = face.mouth_center.x;
1820    landmarks[5] = face.mouth_center.y;
1821}
1822
1823#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1824/*===========================================================================
1825 * FUNCTION   : initCapabilities
1826 *
1827 * DESCRIPTION: initialize camera capabilities in static data struct
1828 *
1829 * PARAMETERS :
1830 *   @cameraId  : camera Id
1831 *
1832 * RETURN     : int32_t type of status
1833 *              NO_ERROR  -- success
1834 *              none-zero failure code
1835 *==========================================================================*/
1836int QCamera3HardwareInterface::initCapabilities(int cameraId)
1837{
1838    int rc = 0;
1839    mm_camera_vtbl_t *cameraHandle = NULL;
1840    QCamera3HeapMemory *capabilityHeap = NULL;
1841
1842    cameraHandle = camera_open(cameraId);
1843    if (!cameraHandle) {
1844        ALOGE("%s: camera_open failed", __func__);
1845        rc = -1;
1846        goto open_failed;
1847    }
1848
1849    capabilityHeap = new QCamera3HeapMemory();
1850    if (capabilityHeap == NULL) {
1851        ALOGE("%s: creation of capabilityHeap failed", __func__);
1852        goto heap_creation_failed;
1853    }
1854    /* Allocate memory for capability buffer */
1855    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1856    if(rc != OK) {
1857        ALOGE("%s: No memory for cappability", __func__);
1858        goto allocate_failed;
1859    }
1860
1861    /* Map memory for capability buffer */
1862    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1863    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1864                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1865                                capabilityHeap->getFd(0),
1866                                sizeof(cam_capability_t));
1867    if(rc < 0) {
1868        ALOGE("%s: failed to map capability buffer", __func__);
1869        goto map_failed;
1870    }
1871
1872    /* Query Capability */
1873    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1874    if(rc < 0) {
1875        ALOGE("%s: failed to query capability",__func__);
1876        goto query_failed;
1877    }
1878    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1879    if (!gCamCapability[cameraId]) {
1880        ALOGE("%s: out of memory", __func__);
1881        goto query_failed;
1882    }
1883    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1884                                        sizeof(cam_capability_t));
1885    rc = 0;
1886
1887query_failed:
1888    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1889                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1890map_failed:
1891    capabilityHeap->deallocate();
1892allocate_failed:
1893    delete capabilityHeap;
1894heap_creation_failed:
1895    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1896    cameraHandle = NULL;
1897open_failed:
1898    return rc;
1899}
1900
1901/*===========================================================================
1902 * FUNCTION   : initParameters
1903 *
1904 * DESCRIPTION: initialize camera parameters
1905 *
1906 * PARAMETERS :
1907 *
1908 * RETURN     : int32_t type of status
1909 *              NO_ERROR  -- success
1910 *              none-zero failure code
1911 *==========================================================================*/
1912int QCamera3HardwareInterface::initParameters()
1913{
1914    int rc = 0;
1915
1916    //Allocate Set Param Buffer
1917    mParamHeap = new QCamera3HeapMemory();
1918    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1919    if(rc != OK) {
1920        rc = NO_MEMORY;
1921        ALOGE("Failed to allocate SETPARM Heap memory");
1922        delete mParamHeap;
1923        mParamHeap = NULL;
1924        return rc;
1925    }
1926
1927    //Map memory for parameters buffer
1928    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1929            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1930            mParamHeap->getFd(0),
1931            sizeof(parm_buffer_t));
1932    if(rc < 0) {
1933        ALOGE("%s:failed to map SETPARM buffer",__func__);
1934        rc = FAILED_TRANSACTION;
1935        mParamHeap->deallocate();
1936        delete mParamHeap;
1937        mParamHeap = NULL;
1938        return rc;
1939    }
1940
1941    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1942    return rc;
1943}
1944
1945/*===========================================================================
1946 * FUNCTION   : deinitParameters
1947 *
1948 * DESCRIPTION: de-initialize camera parameters
1949 *
1950 * PARAMETERS :
1951 *
1952 * RETURN     : NONE
1953 *==========================================================================*/
1954void QCamera3HardwareInterface::deinitParameters()
1955{
1956    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1957            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1958
1959    mParamHeap->deallocate();
1960    delete mParamHeap;
1961    mParamHeap = NULL;
1962
1963    mParameters = NULL;
1964}
1965
1966/*===========================================================================
1967 * FUNCTION   : calcMaxJpegSize
1968 *
1969 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1970 *
1971 * PARAMETERS :
1972 *
1973 * RETURN     : max_jpeg_size
1974 *==========================================================================*/
1975int QCamera3HardwareInterface::calcMaxJpegSize()
1976{
1977    int32_t max_jpeg_size = 0;
1978    int temp_width, temp_height;
1979    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1980        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1981        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1982        if (temp_width * temp_height > max_jpeg_size ) {
1983            max_jpeg_size = temp_width * temp_height;
1984        }
1985    }
1986    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1987    return max_jpeg_size;
1988}
1989
1990/*===========================================================================
1991 * FUNCTION   : initStaticMetadata
1992 *
1993 * DESCRIPTION: initialize the static metadata
1994 *
1995 * PARAMETERS :
1996 *   @cameraId  : camera Id
1997 *
1998 * RETURN     : int32_t type of status
1999 *              0  -- success
2000 *              non-zero failure code
2001 *==========================================================================*/
2002int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2003{
2004    int rc = 0;
2005    CameraMetadata staticInfo;
2006
2007    /* android.info: hardware level */
2008    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2009    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2010        &supportedHardwareLevel, 1);
2011
2012    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2013    /*HAL 3 only*/
2014    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2015                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2016
2017    /*hard coded for now but this should come from sensor*/
2018    float min_focus_distance;
2019    if(facingBack){
2020        min_focus_distance = 10;
2021    } else {
2022        min_focus_distance = 0;
2023    }
2024    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2025                    &min_focus_distance, 1);
2026
2027    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2028                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2029
2030    /*should be using focal lengths but sensor doesn't provide that info now*/
2031    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2032                      &gCamCapability[cameraId]->focal_length,
2033                      1);
2034
2035    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2036                      gCamCapability[cameraId]->apertures,
2037                      gCamCapability[cameraId]->apertures_count);
2038
2039    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2040                gCamCapability[cameraId]->filter_densities,
2041                gCamCapability[cameraId]->filter_densities_count);
2042
2043
2044    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2045                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2046                      gCamCapability[cameraId]->optical_stab_modes_count);
2047
2048    staticInfo.update(ANDROID_LENS_POSITION,
2049                      gCamCapability[cameraId]->lens_position,
2050                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2051
2052    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2053                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2054    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2055                      lens_shading_map_size,
2056                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2057
2058    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2059                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2060    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2061            geo_correction_map_size,
2062            sizeof(geo_correction_map_size)/sizeof(int32_t));
2063
2064    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2065                       gCamCapability[cameraId]->geo_correction_map,
2066                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2067
2068    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2069            gCamCapability[cameraId]->sensor_physical_size, 2);
2070
2071    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2072            gCamCapability[cameraId]->exposure_time_range, 2);
2073
2074    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2075            &gCamCapability[cameraId]->max_frame_duration, 1);
2076
2077
2078    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2079                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2080
2081    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2082                                               gCamCapability[cameraId]->pixel_array_size.height};
2083    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2084                      pixel_array_size, 2);
2085
2086    int32_t active_array_size[] = {0, 0,
2087                                                gCamCapability[cameraId]->active_array_size.width,
2088                                                gCamCapability[cameraId]->active_array_size.height};
2089    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2090                      active_array_size, 4);
2091
2092    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2093            &gCamCapability[cameraId]->white_level, 1);
2094
2095    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2096            gCamCapability[cameraId]->black_level_pattern, 4);
2097
2098    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2099                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2100
2101    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2102                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2103
2104    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2105                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2106
2107    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2108                      &gCamCapability[cameraId]->histogram_size, 1);
2109
2110    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2111            &gCamCapability[cameraId]->max_histogram_count, 1);
2112
2113    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2114                                                gCamCapability[cameraId]->sharpness_map_size.height};
2115
2116    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2117            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2118
2119    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2120            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2121
2122
2123    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2124                      &gCamCapability[cameraId]->raw_min_duration,
2125                       1);
2126
2127    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2128                                                HAL_PIXEL_FORMAT_BLOB};
2129    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2130    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2131                      scalar_formats,
2132                      scalar_formats_count);
2133
2134    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2135    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2136              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2137              available_processed_sizes);
2138    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2139                available_processed_sizes,
2140                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2141
2142    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2143                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2144                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2145
2146    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2147    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2148                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2149                 available_fps_ranges);
2150    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2151            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2152
2153    camera_metadata_rational exposureCompensationStep = {
2154            gCamCapability[cameraId]->exp_compensation_step.numerator,
2155            gCamCapability[cameraId]->exp_compensation_step.denominator};
2156    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2157                      &exposureCompensationStep, 1);
2158
2159    /*TO DO*/
2160    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2161    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2162                      availableVstabModes, sizeof(availableVstabModes));
2163
2164    /*HAL 1 and HAL 3 common*/
2165    float maxZoom = 4;
2166    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2167            &maxZoom, 1);
2168
2169    int32_t max3aRegions = 1;
2170    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2171            &max3aRegions, 1);
2172
2173    uint8_t availableFaceDetectModes[] = {
2174            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2175            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2176    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2177                      availableFaceDetectModes,
2178                      sizeof(availableFaceDetectModes));
2179
2180    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2181                                       gCamCapability[cameraId]->raw_dim.height};
2182    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2183                      raw_size,
2184                      sizeof(raw_size)/sizeof(uint32_t));
2185
2186    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2187                                                        gCamCapability[cameraId]->exposure_compensation_max};
2188    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2189            exposureCompensationRange,
2190            sizeof(exposureCompensationRange)/sizeof(int32_t));
2191
2192    uint8_t lensFacing = (facingBack) ?
2193            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2194    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2195
2196    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2197                available_processed_sizes,
2198                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2199
2200    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2201                      available_thumbnail_sizes,
2202                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2203
2204    int32_t max_jpeg_size = 0;
2205    int temp_width, temp_height;
2206    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2207        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2208        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2209        if (temp_width * temp_height > max_jpeg_size ) {
2210            max_jpeg_size = temp_width * temp_height;
2211        }
2212    }
2213    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2214    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2215                      &max_jpeg_size, 1);
2216
2217    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2218    int32_t size = 0;
2219    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2220        int val = lookupFwkName(EFFECT_MODES_MAP,
2221                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2222                                   gCamCapability[cameraId]->supported_effects[i]);
2223        if (val != NAME_NOT_FOUND) {
2224            avail_effects[size] = (uint8_t)val;
2225            size++;
2226        }
2227    }
2228    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2229                      avail_effects,
2230                      size);
2231
2232    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2233    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2234    int32_t supported_scene_modes_cnt = 0;
2235    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2236        int val = lookupFwkName(SCENE_MODES_MAP,
2237                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2238                                gCamCapability[cameraId]->supported_scene_modes[i]);
2239        if (val != NAME_NOT_FOUND) {
2240            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2241            supported_indexes[supported_scene_modes_cnt] = i;
2242            supported_scene_modes_cnt++;
2243        }
2244    }
2245
2246    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2247                      avail_scene_modes,
2248                      supported_scene_modes_cnt);
2249
2250    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2251    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2252                      supported_scene_modes_cnt,
2253                      scene_mode_overrides,
2254                      supported_indexes,
2255                      cameraId);
2256    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2257                      scene_mode_overrides,
2258                      supported_scene_modes_cnt*3);
2259
2260    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2261    size = 0;
2262    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2263        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2264                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2265                                 gCamCapability[cameraId]->supported_antibandings[i]);
2266        if (val != NAME_NOT_FOUND) {
2267            avail_antibanding_modes[size] = (uint8_t)val;
2268            size++;
2269        }
2270
2271    }
2272    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2273                      avail_antibanding_modes,
2274                      size);
2275
2276    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2277    size = 0;
2278    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2279        int val = lookupFwkName(FOCUS_MODES_MAP,
2280                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2281                                gCamCapability[cameraId]->supported_focus_modes[i]);
2282        if (val != NAME_NOT_FOUND) {
2283            avail_af_modes[size] = (uint8_t)val;
2284            size++;
2285        }
2286    }
2287    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2288                      avail_af_modes,
2289                      size);
2290
2291    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2292    size = 0;
2293    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2294        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2295                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2296                                    gCamCapability[cameraId]->supported_white_balances[i]);
2297        if (val != NAME_NOT_FOUND) {
2298            avail_awb_modes[size] = (uint8_t)val;
2299            size++;
2300        }
2301    }
2302    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2303                      avail_awb_modes,
2304                      size);
2305
2306    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2307    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2308      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2309
2310    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2311            available_flash_levels,
2312            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2313
2314
2315    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2316    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2317            &flashAvailable, 1);
2318
2319    uint8_t avail_ae_modes[5];
2320    size = 0;
2321    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2322        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2323        size++;
2324    }
2325    if (flashAvailable) {
2326        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2327        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2328        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2329    }
2330    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2331                      avail_ae_modes,
2332                      size);
2333
2334    int32_t sensitivity_range[2];
2335    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2336    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2337    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2338                      sensitivity_range,
2339                      sizeof(sensitivity_range) / sizeof(int32_t));
2340
2341    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2342                      &gCamCapability[cameraId]->max_analog_sensitivity,
2343                      1);
2344
2345    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2346                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2347                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2348
2349    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2350    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2351                      &sensor_orientation,
2352                      1);
2353
2354    int32_t max_output_streams[3] = {1, 3, 1};
2355    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2356                      max_output_streams,
2357                      3);
2358
2359    gStaticMetadata[cameraId] = staticInfo.release();
2360    return rc;
2361}
2362
2363/*===========================================================================
2364 * FUNCTION   : makeTable
2365 *
2366 * DESCRIPTION: make a table of sizes
2367 *
2368 * PARAMETERS :
2369 *
2370 *
2371 *==========================================================================*/
2372void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2373                                          int32_t* sizeTable)
2374{
2375    int j = 0;
2376    for (int i = 0; i < size; i++) {
2377        sizeTable[j] = dimTable[i].width;
2378        sizeTable[j+1] = dimTable[i].height;
2379        j+=2;
2380    }
2381}
2382
2383/*===========================================================================
2384 * FUNCTION   : makeFPSTable
2385 *
2386 * DESCRIPTION: make a table of fps ranges
2387 *
2388 * PARAMETERS :
2389 *
2390 *==========================================================================*/
2391void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2392                                          int32_t* fpsRangesTable)
2393{
2394    int j = 0;
2395    for (int i = 0; i < size; i++) {
2396        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2397        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2398        j+=2;
2399    }
2400}
2401
2402/*===========================================================================
2403 * FUNCTION   : makeOverridesList
2404 *
2405 * DESCRIPTION: make a list of scene mode overrides
2406 *
2407 * PARAMETERS :
2408 *
2409 *
2410 *==========================================================================*/
2411void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2412                                                  uint8_t size, uint8_t* overridesList,
2413                                                  uint8_t* supported_indexes,
2414                                                  int camera_id)
2415{
2416    /*daemon will give a list of overrides for all scene modes.
2417      However we should send the fwk only the overrides for the scene modes
2418      supported by the framework*/
2419    int j = 0, index = 0, supt = 0;
2420    uint8_t focus_override;
2421    for (int i = 0; i < size; i++) {
2422        supt = 0;
2423        index = supported_indexes[i];
2424        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2425        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2426                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2427                                                    overridesTable[index].awb_mode);
2428        focus_override = (uint8_t)overridesTable[index].af_mode;
2429        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2430           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2431              supt = 1;
2432              break;
2433           }
2434        }
2435        if (supt) {
2436           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2437                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2438                                              focus_override);
2439        } else {
2440           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2441        }
2442        j+=3;
2443    }
2444}
2445
2446/*===========================================================================
2447 * FUNCTION   : getPreviewHalPixelFormat
2448 *
2449 * DESCRIPTION: convert the format to type recognized by framework
2450 *
2451 * PARAMETERS : format : the format from backend
2452 *
2453 ** RETURN    : format recognized by framework
2454 *
2455 *==========================================================================*/
2456int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2457{
2458    int32_t halPixelFormat;
2459
2460    switch (format) {
2461    case CAM_FORMAT_YUV_420_NV12:
2462        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2463        break;
2464    case CAM_FORMAT_YUV_420_NV21:
2465        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2466        break;
2467    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2468        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2469        break;
2470    case CAM_FORMAT_YUV_420_YV12:
2471        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2472        break;
2473    case CAM_FORMAT_YUV_422_NV16:
2474    case CAM_FORMAT_YUV_422_NV61:
2475    default:
2476        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2477        break;
2478    }
2479    return halPixelFormat;
2480}
2481
2482/*===========================================================================
2483 * FUNCTION   : getSensorSensitivity
2484 *
2485 * DESCRIPTION: convert iso_mode to an integer value
2486 *
2487 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2488 *
2489 ** RETURN    : sensitivity supported by sensor
2490 *
2491 *==========================================================================*/
2492int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2493{
2494    int32_t sensitivity;
2495
2496    switch (iso_mode) {
2497    case CAM_ISO_MODE_100:
2498        sensitivity = 100;
2499        break;
2500    case CAM_ISO_MODE_200:
2501        sensitivity = 200;
2502        break;
2503    case CAM_ISO_MODE_400:
2504        sensitivity = 400;
2505        break;
2506    case CAM_ISO_MODE_800:
2507        sensitivity = 800;
2508        break;
2509    case CAM_ISO_MODE_1600:
2510        sensitivity = 1600;
2511        break;
2512    default:
2513        sensitivity = -1;
2514        break;
2515    }
2516    return sensitivity;
2517}
2518
2519
2520/*===========================================================================
2521 * FUNCTION   : AddSetParmEntryToBatch
2522 *
2523 * DESCRIPTION: add set parameter entry into batch
2524 *
2525 * PARAMETERS :
2526 *   @p_table     : ptr to parameter buffer
2527 *   @paramType   : parameter type
2528 *   @paramLength : length of parameter value
2529 *   @paramValue  : ptr to parameter value
2530 *
2531 * RETURN     : int32_t type of status
2532 *              NO_ERROR  -- success
2533 *              none-zero failure code
2534 *==========================================================================*/
2535int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2536                                                          cam_intf_parm_type_t paramType,
2537                                                          uint32_t paramLength,
2538                                                          void *paramValue)
2539{
2540    int position = paramType;
2541    int current, next;
2542
2543    /*************************************************************************
2544    *                 Code to take care of linking next flags                *
2545    *************************************************************************/
2546    current = GET_FIRST_PARAM_ID(p_table);
2547    if (position == current){
2548        //DO NOTHING
2549    } else if (position < current){
2550        SET_NEXT_PARAM_ID(position, p_table, current);
2551        SET_FIRST_PARAM_ID(p_table, position);
2552    } else {
2553        /* Search for the position in the linked list where we need to slot in*/
2554        while (position > GET_NEXT_PARAM_ID(current, p_table))
2555            current = GET_NEXT_PARAM_ID(current, p_table);
2556
2557        /*If node already exists no need to alter linking*/
2558        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2559            next = GET_NEXT_PARAM_ID(current, p_table);
2560            SET_NEXT_PARAM_ID(current, p_table, position);
2561            SET_NEXT_PARAM_ID(position, p_table, next);
2562        }
2563    }
2564
2565    /*************************************************************************
2566    *                   Copy contents into entry                             *
2567    *************************************************************************/
2568
2569    if (paramLength > sizeof(parm_type_t)) {
2570        ALOGE("%s:Size of input larger than max entry size",__func__);
2571        return BAD_VALUE;
2572    }
2573    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2574    return NO_ERROR;
2575}
2576
2577/*===========================================================================
2578 * FUNCTION   : lookupFwkName
2579 *
2580 * DESCRIPTION: In case the enum is not same in fwk and backend
2581 *              make sure the parameter is correctly propogated
2582 *
2583 * PARAMETERS  :
2584 *   @arr      : map between the two enums
2585 *   @len      : len of the map
2586 *   @hal_name : name of the hal_parm to map
2587 *
2588 * RETURN     : int type of status
2589 *              fwk_name  -- success
2590 *              none-zero failure code
2591 *==========================================================================*/
2592int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2593                                             int len, int hal_name)
2594{
2595
2596    for (int i = 0; i < len; i++) {
2597        if (arr[i].hal_name == hal_name)
2598            return arr[i].fwk_name;
2599    }
2600
2601    /* Not able to find matching framework type is not necessarily
2602     * an error case. This happens when mm-camera supports more attributes
2603     * than the frameworks do */
2604    ALOGD("%s: Cannot find matching framework type", __func__);
2605    return NAME_NOT_FOUND;
2606}
2607
2608/*===========================================================================
2609 * FUNCTION   : lookupHalName
2610 *
2611 * DESCRIPTION: In case the enum is not same in fwk and backend
2612 *              make sure the parameter is correctly propogated
2613 *
2614 * PARAMETERS  :
2615 *   @arr      : map between the two enums
2616 *   @len      : len of the map
2617 *   @fwk_name : name of the hal_parm to map
2618 *
2619 * RETURN     : int32_t type of status
2620 *              hal_name  -- success
2621 *              none-zero failure code
2622 *==========================================================================*/
2623int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2624                                             int len, int fwk_name)
2625{
2626    for (int i = 0; i < len; i++) {
2627       if (arr[i].fwk_name == fwk_name)
2628           return arr[i].hal_name;
2629    }
2630    ALOGE("%s: Cannot find matching hal type", __func__);
2631    return NAME_NOT_FOUND;
2632}
2633
2634/*===========================================================================
2635 * FUNCTION   : getCapabilities
2636 *
2637 * DESCRIPTION: query camera capabilities
2638 *
2639 * PARAMETERS :
2640 *   @cameraId  : camera Id
2641 *   @info      : camera info struct to be filled in with camera capabilities
2642 *
2643 * RETURN     : int32_t type of status
2644 *              NO_ERROR  -- success
2645 *              none-zero failure code
2646 *==========================================================================*/
2647int QCamera3HardwareInterface::getCamInfo(int cameraId,
2648                                    struct camera_info *info)
2649{
2650    int rc = 0;
2651
2652    if (NULL == gCamCapability[cameraId]) {
2653        rc = initCapabilities(cameraId);
2654        if (rc < 0) {
2655            //pthread_mutex_unlock(&g_camlock);
2656            return rc;
2657        }
2658    }
2659
2660    if (NULL == gStaticMetadata[cameraId]) {
2661        rc = initStaticMetadata(cameraId);
2662        if (rc < 0) {
2663            return rc;
2664        }
2665    }
2666
2667    switch(gCamCapability[cameraId]->position) {
2668    case CAM_POSITION_BACK:
2669        info->facing = CAMERA_FACING_BACK;
2670        break;
2671
2672    case CAM_POSITION_FRONT:
2673        info->facing = CAMERA_FACING_FRONT;
2674        break;
2675
2676    default:
2677        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2678        rc = -1;
2679        break;
2680    }
2681
2682
2683    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2684    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2685    info->static_camera_characteristics = gStaticMetadata[cameraId];
2686
2687    return rc;
2688}
2689
2690/*===========================================================================
2691 * FUNCTION   : translateMetadata
2692 *
2693 * DESCRIPTION: translate the metadata into camera_metadata_t
2694 *
2695 * PARAMETERS : type of the request
2696 *
2697 *
2698 * RETURN     : success: camera_metadata_t*
2699 *              failure: NULL
2700 *
2701 *==========================================================================*/
2702camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2703{
2704    pthread_mutex_lock(&mMutex);
2705
2706    if (mDefaultMetadata[type] != NULL) {
2707        pthread_mutex_unlock(&mMutex);
2708        return mDefaultMetadata[type];
2709    }
2710    //first time we are handling this request
2711    //fill up the metadata structure using the wrapper class
2712    CameraMetadata settings;
2713    //translate from cam_capability_t to camera_metadata_tag_t
2714    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2715    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2716
2717    /*control*/
2718
2719    uint8_t controlIntent = 0;
2720    switch (type) {
2721      case CAMERA3_TEMPLATE_PREVIEW:
2722        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2723        break;
2724      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2725        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2726        break;
2727      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2728        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2729        break;
2730      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2731        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2732        break;
2733      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2734        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2735        break;
2736      default:
2737        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2738        break;
2739    }
2740    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2741
2742    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2743            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2744
2745    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2746    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2747
2748    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2749    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2750
2751    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2752    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2753
2754    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2755    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2756
2757    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2758    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2759
2760    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2761    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2762
2763    static uint8_t focusMode;
2764    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2765        ALOGE("%s: Setting focus mode to auto", __func__);
2766        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2767    } else {
2768        ALOGE("%s: Setting focus mode to off", __func__);
2769        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2770    }
2771    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2772
2773    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2774    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2775
2776    /*flash*/
2777    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2778    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2779
2780    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2781    settings.update(ANDROID_FLASH_FIRING_POWER,
2782            &flashFiringLevel, 1);
2783
2784    /* lens */
2785    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2786    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2787
2788    if (gCamCapability[mCameraId]->filter_densities_count) {
2789        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2790        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2791                        gCamCapability[mCameraId]->filter_densities_count);
2792    }
2793
2794    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2795    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2796
2797    /* frame duration */
2798    int64_t default_frame_duration = NSEC_PER_33MSEC;
2799    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2800
2801    /* sensitivity */
2802    int32_t default_sensitivity = 100;
2803    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2804
2805    mDefaultMetadata[type] = settings.release();
2806
2807    pthread_mutex_unlock(&mMutex);
2808    return mDefaultMetadata[type];
2809}
2810
2811/*===========================================================================
2812 * FUNCTION   : setFrameParameters
2813 *
2814 * DESCRIPTION: set parameters per frame as requested in the metadata from
2815 *              framework
2816 *
2817 * PARAMETERS :
2818 *   @request   : request that needs to be serviced
2819 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2820 *
2821 * RETURN     : success: NO_ERROR
2822 *              failure:
2823 *==========================================================================*/
2824int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2825                    uint32_t streamTypeMask)
2826{
2827    /*translate from camera_metadata_t type to parm_type_t*/
2828    int rc = 0;
2829    if (request->settings == NULL && mFirstRequest) {
2830        /*settings cannot be null for the first request*/
2831        return BAD_VALUE;
2832    }
2833
2834    int32_t hal_version = CAM_HAL_V3;
2835
2836    memset(mParameters, 0, sizeof(parm_buffer_t));
2837    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2838    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2839                sizeof(hal_version), &hal_version);
2840    if (rc < 0) {
2841        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2842        return BAD_VALUE;
2843    }
2844
2845    /*we need to update the frame number in the parameters*/
2846    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2847                                sizeof(request->frame_number), &(request->frame_number));
2848    if (rc < 0) {
2849        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2850        return BAD_VALUE;
2851    }
2852
2853    /* Update stream id mask where buffers are requested */
2854    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2855                                sizeof(streamTypeMask), &streamTypeMask);
2856    if (rc < 0) {
2857        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2858        return BAD_VALUE;
2859    }
2860
2861    if(request->settings != NULL){
2862        rc = translateMetadataToParameters(request);
2863    }
2864    /*set the parameters to backend*/
2865    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2866    return rc;
2867}
2868
2869/*===========================================================================
2870 * FUNCTION   : translateMetadataToParameters
2871 *
2872 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2873 *
2874 *
2875 * PARAMETERS :
2876 *   @request  : request sent from framework
2877 *
2878 *
2879 * RETURN     : success: NO_ERROR
2880 *              failure:
2881 *==========================================================================*/
2882int QCamera3HardwareInterface::translateMetadataToParameters
2883                                  (const camera3_capture_request_t *request)
2884{
2885    int rc = 0;
2886    CameraMetadata frame_settings;
2887    frame_settings = request->settings;
2888
2889    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2890        int32_t antibandingMode =
2891            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2892        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2893                sizeof(antibandingMode), &antibandingMode);
2894    }
2895
2896    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2897        int32_t expCompensation = frame_settings.find(
2898            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2899        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2900            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2901        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2902            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2903        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2904          sizeof(expCompensation), &expCompensation);
2905    }
2906
2907    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2908        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2909        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2910                sizeof(aeLock), &aeLock);
2911    }
2912    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2913        cam_fps_range_t fps_range;
2914        fps_range.min_fps =
2915            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2916        fps_range.max_fps =
2917            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2918        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2919                sizeof(fps_range), &fps_range);
2920    }
2921
2922    float focalDistance = -1.0;
2923    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2924        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2925        rc = AddSetParmEntryToBatch(mParameters,
2926                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2927                sizeof(focalDistance), &focalDistance);
2928    }
2929
2930    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2931        uint8_t fwk_focusMode =
2932            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2933        uint8_t focusMode;
2934        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2935            focusMode = CAM_FOCUS_MODE_INFINITY;
2936        } else{
2937         focusMode = lookupHalName(FOCUS_MODES_MAP,
2938                                   sizeof(FOCUS_MODES_MAP),
2939                                   fwk_focusMode);
2940        }
2941        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2942                sizeof(focusMode), &focusMode);
2943    }
2944
2945    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2946        uint8_t awbLock =
2947            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2948        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2949                sizeof(awbLock), &awbLock);
2950    }
2951
2952    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2953        uint8_t fwk_whiteLevel =
2954            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2955        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2956                sizeof(WHITE_BALANCE_MODES_MAP),
2957                fwk_whiteLevel);
2958        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2959                sizeof(whiteLevel), &whiteLevel);
2960    }
2961
2962    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2963        uint8_t fwk_effectMode =
2964            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2965        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2966                sizeof(EFFECT_MODES_MAP),
2967                fwk_effectMode);
2968        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2969                sizeof(effectMode), &effectMode);
2970    }
2971
2972    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2973        uint8_t fwk_aeMode =
2974            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2975        uint8_t aeMode;
2976        int32_t redeye;
2977
2978        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2979            aeMode = CAM_AE_MODE_OFF;
2980        } else {
2981            aeMode = CAM_AE_MODE_ON;
2982        }
2983        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2984            redeye = 1;
2985        } else {
2986            redeye = 0;
2987        }
2988
2989        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2990                                          sizeof(AE_FLASH_MODE_MAP),
2991                                          fwk_aeMode);
2992        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2993                sizeof(aeMode), &aeMode);
2994        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2995                sizeof(flashMode), &flashMode);
2996        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2997                sizeof(redeye), &redeye);
2998    }
2999
3000    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3001        uint8_t colorCorrectMode =
3002            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3003        rc =
3004            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3005                    sizeof(colorCorrectMode), &colorCorrectMode);
3006    }
3007
3008    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3009        cam_color_correct_gains_t colorCorrectGains;
3010        for (int i = 0; i < 4; i++) {
3011            colorCorrectGains.gains[i] =
3012                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3013        }
3014        rc =
3015            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3016                    sizeof(colorCorrectGains), &colorCorrectGains);
3017    }
3018
3019    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3020        cam_color_correct_matrix_t colorCorrectTransform;
3021        cam_rational_type_t transform_elem;
3022        int num = 0;
3023        for (int i = 0; i < 3; i++) {
3024           for (int j = 0; j < 3; j++) {
3025              transform_elem.numerator =
3026                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3027              transform_elem.denominator =
3028                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3029              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3030              num++;
3031           }
3032        }
3033        rc =
3034            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3035                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3036    }
3037
3038    cam_trigger_t aecTrigger;
3039    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3040    aecTrigger.trigger_id = -1;
3041    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3042        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3043        aecTrigger.trigger =
3044            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3045        aecTrigger.trigger_id =
3046            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3047    }
3048    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3049                                sizeof(aecTrigger), &aecTrigger);
3050
3051    /*af_trigger must come with a trigger id*/
3052    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3053        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3054        cam_trigger_t af_trigger;
3055        af_trigger.trigger =
3056            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3057        af_trigger.trigger_id =
3058            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3059        rc = AddSetParmEntryToBatch(mParameters,
3060                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3061    }
3062
3063    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3064        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3065        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3066                sizeof(metaMode), &metaMode);
3067        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3068           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3069           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3070                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3071                                             fwk_sceneMode);
3072           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3073                sizeof(sceneMode), &sceneMode);
3074        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3075           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3076           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3077                sizeof(sceneMode), &sceneMode);
3078        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3079           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3080           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3081                sizeof(sceneMode), &sceneMode);
3082        }
3083    }
3084
3085    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3086        int32_t demosaic =
3087            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3088        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3089                sizeof(demosaic), &demosaic);
3090    }
3091
3092    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3093        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3094        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3095                sizeof(edgeMode), &edgeMode);
3096    }
3097
3098    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3099        int32_t edgeStrength =
3100            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3101        rc = AddSetParmEntryToBatch(mParameters,
3102                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
3103    }
3104
3105    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3106        int32_t respectFlashMode = 1;
3107        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3108            uint8_t fwk_aeMode =
3109                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3110            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3111                respectFlashMode = 0;
3112                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3113                    __func__);
3114            }
3115        }
3116        if (respectFlashMode) {
3117            uint8_t flashMode =
3118                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3119            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3120                                          sizeof(FLASH_MODES_MAP),
3121                                          flashMode);
3122            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3123            // To check: CAM_INTF_META_FLASH_MODE usage
3124            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3125                          sizeof(flashMode), &flashMode);
3126        }
3127    }
3128
3129    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3130        uint8_t flashPower =
3131            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3132        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3133                sizeof(flashPower), &flashPower);
3134    }
3135
3136    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3137        int64_t flashFiringTime =
3138            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3139        rc = AddSetParmEntryToBatch(mParameters,
3140                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3141    }
3142
3143    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3144        uint8_t geometricMode =
3145            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3146        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3147                sizeof(geometricMode), &geometricMode);
3148    }
3149
3150    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3151        uint8_t geometricStrength =
3152            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3153        rc = AddSetParmEntryToBatch(mParameters,
3154                CAM_INTF_META_GEOMETRIC_STRENGTH,
3155                sizeof(geometricStrength), &geometricStrength);
3156    }
3157
3158    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3159        uint8_t hotPixelMode =
3160            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3161        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3162                sizeof(hotPixelMode), &hotPixelMode);
3163    }
3164
3165    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3166        float lensAperture =
3167            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3168        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3169                sizeof(lensAperture), &lensAperture);
3170    }
3171
3172    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3173        float filterDensity =
3174            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3175        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3176                sizeof(filterDensity), &filterDensity);
3177    }
3178
3179    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3180        float focalLength =
3181            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3182        rc = AddSetParmEntryToBatch(mParameters,
3183                CAM_INTF_META_LENS_FOCAL_LENGTH,
3184                sizeof(focalLength), &focalLength);
3185    }
3186
3187    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3188        uint8_t optStabMode =
3189            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3190        rc = AddSetParmEntryToBatch(mParameters,
3191                CAM_INTF_META_LENS_OPT_STAB_MODE,
3192                sizeof(optStabMode), &optStabMode);
3193    }
3194
3195    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3196        uint8_t noiseRedMode =
3197            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3198        rc = AddSetParmEntryToBatch(mParameters,
3199                CAM_INTF_META_NOISE_REDUCTION_MODE,
3200                sizeof(noiseRedMode), &noiseRedMode);
3201    }
3202
3203    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3204        uint8_t noiseRedStrength =
3205            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3206        rc = AddSetParmEntryToBatch(mParameters,
3207                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3208                sizeof(noiseRedStrength), &noiseRedStrength);
3209    }
3210
3211    cam_crop_region_t scalerCropRegion;
3212    bool scalerCropSet = false;
3213    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3214        scalerCropRegion.left =
3215            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3216        scalerCropRegion.top =
3217            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3218        scalerCropRegion.width =
3219            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3220        scalerCropRegion.height =
3221            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3222        rc = AddSetParmEntryToBatch(mParameters,
3223                CAM_INTF_META_SCALER_CROP_REGION,
3224                sizeof(scalerCropRegion), &scalerCropRegion);
3225        scalerCropSet = true;
3226    }
3227
3228    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3229        int64_t sensorExpTime =
3230            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3231        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3232        rc = AddSetParmEntryToBatch(mParameters,
3233                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3234                sizeof(sensorExpTime), &sensorExpTime);
3235    }
3236
3237    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3238        int64_t sensorFrameDuration =
3239            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3240        int64_t minFrameDuration = getMinFrameDuration(request);
3241        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3242        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3243            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3244        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3245        rc = AddSetParmEntryToBatch(mParameters,
3246                CAM_INTF_META_SENSOR_FRAME_DURATION,
3247                sizeof(sensorFrameDuration), &sensorFrameDuration);
3248    }
3249
3250    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3251        int32_t sensorSensitivity =
3252            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3253        if (sensorSensitivity <
3254                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3255            sensorSensitivity =
3256                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3257        if (sensorSensitivity >
3258                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3259            sensorSensitivity =
3260                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3261        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3262        rc = AddSetParmEntryToBatch(mParameters,
3263                CAM_INTF_META_SENSOR_SENSITIVITY,
3264                sizeof(sensorSensitivity), &sensorSensitivity);
3265    }
3266
3267    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3268        int32_t shadingMode =
3269            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3270        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3271                sizeof(shadingMode), &shadingMode);
3272    }
3273
3274    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3275        uint8_t shadingStrength =
3276            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3277        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3278                sizeof(shadingStrength), &shadingStrength);
3279    }
3280
3281    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3282        uint8_t fwk_facedetectMode =
3283            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3284        uint8_t facedetectMode =
3285            lookupHalName(FACEDETECT_MODES_MAP,
3286                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3287        rc = AddSetParmEntryToBatch(mParameters,
3288                CAM_INTF_META_STATS_FACEDETECT_MODE,
3289                sizeof(facedetectMode), &facedetectMode);
3290    }
3291
3292    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3293        uint8_t histogramMode =
3294            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3295        rc = AddSetParmEntryToBatch(mParameters,
3296                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3297                sizeof(histogramMode), &histogramMode);
3298    }
3299
3300    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3301        uint8_t sharpnessMapMode =
3302            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3303        rc = AddSetParmEntryToBatch(mParameters,
3304                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3305                sizeof(sharpnessMapMode), &sharpnessMapMode);
3306    }
3307
3308    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3309        uint8_t tonemapMode =
3310            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3311        rc = AddSetParmEntryToBatch(mParameters,
3312                CAM_INTF_META_TONEMAP_MODE,
3313                sizeof(tonemapMode), &tonemapMode);
3314    }
3315    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3316    /*All tonemap channels will have the same number of points*/
3317    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3318        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3319        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3320        cam_rgb_tonemap_curves tonemapCurves;
3321        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3322
3323        /* ch0 = G*/
3324        int point = 0;
3325        cam_tonemap_curve_t tonemapCurveGreen;
3326        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3327            for (int j = 0; j < 2; j++) {
3328               tonemapCurveGreen.tonemap_points[i][j] =
3329                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3330               point++;
3331            }
3332        }
3333        tonemapCurves.curves[0] = tonemapCurveGreen;
3334
3335        /* ch 1 = B */
3336        point = 0;
3337        cam_tonemap_curve_t tonemapCurveBlue;
3338        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3339            for (int j = 0; j < 2; j++) {
3340               tonemapCurveBlue.tonemap_points[i][j] =
3341                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3342               point++;
3343            }
3344        }
3345        tonemapCurves.curves[1] = tonemapCurveBlue;
3346
3347        /* ch 2 = R */
3348        point = 0;
3349        cam_tonemap_curve_t tonemapCurveRed;
3350        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3351            for (int j = 0; j < 2; j++) {
3352               tonemapCurveRed.tonemap_points[i][j] =
3353                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3354               point++;
3355            }
3356        }
3357        tonemapCurves.curves[2] = tonemapCurveRed;
3358
3359        rc = AddSetParmEntryToBatch(mParameters,
3360                CAM_INTF_META_TONEMAP_CURVES,
3361                sizeof(tonemapCurves), &tonemapCurves);
3362    }
3363
3364    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3365        uint8_t captureIntent =
3366            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3367        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3368                sizeof(captureIntent), &captureIntent);
3369    }
3370
3371    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3372        uint8_t blackLevelLock =
3373            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3374        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3375                sizeof(blackLevelLock), &blackLevelLock);
3376    }
3377
3378    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3379        uint8_t lensShadingMapMode =
3380            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3381        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3382                sizeof(lensShadingMapMode), &lensShadingMapMode);
3383    }
3384
3385    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3386        cam_area_t roi;
3387        bool reset = true;
3388        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3389        if (scalerCropSet) {
3390            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3391        }
3392        if (reset) {
3393            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3394                    sizeof(roi), &roi);
3395        }
3396    }
3397
3398    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3399        cam_area_t roi;
3400        bool reset = true;
3401        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3402        if (scalerCropSet) {
3403            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3404        }
3405        if (reset) {
3406            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3407                    sizeof(roi), &roi);
3408        }
3409    }
3410
3411    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3412        cam_area_t roi;
3413        bool reset = true;
3414        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3415        if (scalerCropSet) {
3416            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3417        }
3418        if (reset) {
3419            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3420                    sizeof(roi), &roi);
3421        }
3422    }
3423    return rc;
3424}
3425
3426/*===========================================================================
3427 * FUNCTION   : getJpegSettings
3428 *
3429 * DESCRIPTION: save the jpeg settings in the HAL
3430 *
3431 *
3432 * PARAMETERS :
3433 *   @settings  : frame settings information from framework
3434 *
3435 *
3436 * RETURN     : success: NO_ERROR
3437 *              failure:
3438 *==========================================================================*/
3439int QCamera3HardwareInterface::getJpegSettings
3440                                  (const camera_metadata_t *settings)
3441{
3442    if (mJpegSettings) {
3443        if (mJpegSettings->gps_timestamp) {
3444            free(mJpegSettings->gps_timestamp);
3445            mJpegSettings->gps_timestamp = NULL;
3446        }
3447        if (mJpegSettings->gps_coordinates) {
3448            for (int i = 0; i < 3; i++) {
3449                free(mJpegSettings->gps_coordinates[i]);
3450                mJpegSettings->gps_coordinates[i] = NULL;
3451            }
3452        }
3453        free(mJpegSettings);
3454        mJpegSettings = NULL;
3455    }
3456    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3457    CameraMetadata jpeg_settings;
3458    jpeg_settings = settings;
3459
3460    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3461        mJpegSettings->jpeg_orientation =
3462            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3463    } else {
3464        mJpegSettings->jpeg_orientation = 0;
3465    }
3466    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3467        mJpegSettings->jpeg_quality =
3468            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3469    } else {
3470        mJpegSettings->jpeg_quality = 85;
3471    }
3472    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3473        mJpegSettings->thumbnail_size.width =
3474            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3475        mJpegSettings->thumbnail_size.height =
3476            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3477    } else {
3478        mJpegSettings->thumbnail_size.width = 0;
3479        mJpegSettings->thumbnail_size.height = 0;
3480    }
3481    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3482        for (int i = 0; i < 3; i++) {
3483            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3484            *(mJpegSettings->gps_coordinates[i]) =
3485                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3486        }
3487    } else{
3488       for (int i = 0; i < 3; i++) {
3489            mJpegSettings->gps_coordinates[i] = NULL;
3490        }
3491    }
3492
3493    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3494        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3495        *(mJpegSettings->gps_timestamp) =
3496            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3497    } else {
3498        mJpegSettings->gps_timestamp = NULL;
3499    }
3500
3501    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3502        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3503        for (int i = 0; i < len; i++) {
3504            mJpegSettings->gps_processing_method[i] =
3505                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3506        }
3507        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3508            mJpegSettings->gps_processing_method[len] = '\0';
3509        }
3510    } else {
3511        mJpegSettings->gps_processing_method[0] = '\0';
3512    }
3513
3514    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3515        mJpegSettings->sensor_sensitivity =
3516            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3517    } else {
3518        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3519    }
3520
3521    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3522
3523    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3524        mJpegSettings->lens_focal_length =
3525            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3526    }
3527    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3528        mJpegSettings->exposure_compensation =
3529            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3530    }
3531    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3532    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3533    mJpegSettings->is_jpeg_format = true;
3534    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3535    return 0;
3536}
3537
3538/*===========================================================================
3539 * FUNCTION   : captureResultCb
3540 *
3541 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3542 *
3543 * PARAMETERS :
3544 *   @frame  : frame information from mm-camera-interface
3545 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3546 *   @userdata: userdata
3547 *
3548 * RETURN     : NONE
3549 *==========================================================================*/
3550void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3551                camera3_stream_buffer_t *buffer,
3552                uint32_t frame_number, void *userdata)
3553{
3554    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3555    if (hw == NULL) {
3556        ALOGE("%s: Invalid hw %p", __func__, hw);
3557        return;
3558    }
3559
3560    hw->captureResultCb(metadata, buffer, frame_number);
3561    return;
3562}
3563
3564
3565/*===========================================================================
3566 * FUNCTION   : initialize
3567 *
3568 * DESCRIPTION: Pass framework callback pointers to HAL
3569 *
3570 * PARAMETERS :
3571 *
3572 *
3573 * RETURN     : Success : 0
3574 *              Failure: -ENODEV
3575 *==========================================================================*/
3576
3577int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3578                                  const camera3_callback_ops_t *callback_ops)
3579{
3580    ALOGV("%s: E", __func__);
3581    QCamera3HardwareInterface *hw =
3582        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3583    if (!hw) {
3584        ALOGE("%s: NULL camera device", __func__);
3585        return -ENODEV;
3586    }
3587
3588    int rc = hw->initialize(callback_ops);
3589    ALOGV("%s: X", __func__);
3590    return rc;
3591}
3592
3593/*===========================================================================
3594 * FUNCTION   : configure_streams
3595 *
3596 * DESCRIPTION: Set up the input and output streams requested by the framework
3597 *
3598 * PARAMETERS :
3599 *
3600 *
3601 * RETURN     : Success: 0
3602 *              Failure: -EINVAL (if stream configuration is invalid)
3603 *                       -ENODEV (fatal error)
3604 *==========================================================================*/
3605
3606int QCamera3HardwareInterface::configure_streams(
3607        const struct camera3_device *device,
3608        camera3_stream_configuration_t *stream_list)
3609{
3610    ALOGV("%s: E", __func__);
3611    QCamera3HardwareInterface *hw =
3612        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3613    if (!hw) {
3614        ALOGE("%s: NULL camera device", __func__);
3615        return -ENODEV;
3616    }
3617    int rc = hw->configureStreams(stream_list);
3618    ALOGV("%s: X", __func__);
3619    return rc;
3620}
3621
3622/*===========================================================================
3623 * FUNCTION   : register_stream_buffers
3624 *
3625 * DESCRIPTION: Register stream buffers with the device
3626 *
3627 * PARAMETERS :
3628 *
3629 * RETURN     :
3630 *==========================================================================*/
3631int QCamera3HardwareInterface::register_stream_buffers(
3632        const struct camera3_device *device,
3633        const camera3_stream_buffer_set_t *buffer_set)
3634{
3635    ALOGV("%s: E", __func__);
3636    QCamera3HardwareInterface *hw =
3637        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3638    if (!hw) {
3639        ALOGE("%s: NULL camera device", __func__);
3640        return -ENODEV;
3641    }
3642    int rc = hw->registerStreamBuffers(buffer_set);
3643    ALOGV("%s: X", __func__);
3644    return rc;
3645}
3646
3647/*===========================================================================
3648 * FUNCTION   : construct_default_request_settings
3649 *
3650 * DESCRIPTION: Configure a settings buffer to meet the required use case
3651 *
3652 * PARAMETERS :
3653 *
3654 *
3655 * RETURN     : Success: Return valid metadata
3656 *              Failure: Return NULL
3657 *==========================================================================*/
3658const camera_metadata_t* QCamera3HardwareInterface::
3659    construct_default_request_settings(const struct camera3_device *device,
3660                                        int type)
3661{
3662
3663    ALOGV("%s: E", __func__);
3664    camera_metadata_t* fwk_metadata = NULL;
3665    QCamera3HardwareInterface *hw =
3666        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3667    if (!hw) {
3668        ALOGE("%s: NULL camera device", __func__);
3669        return NULL;
3670    }
3671
3672    fwk_metadata = hw->translateCapabilityToMetadata(type);
3673
3674    ALOGV("%s: X", __func__);
3675    return fwk_metadata;
3676}
3677
3678/*===========================================================================
3679 * FUNCTION   : process_capture_request
3680 *
3681 * DESCRIPTION: Submit one capture request from the framework to the HAL for processing
3682 *
3683 * PARAMETERS :
3684 *
3685 *
3686 * RETURN     :
3687 *==========================================================================*/
3688int QCamera3HardwareInterface::process_capture_request(
3689                    const struct camera3_device *device,
3690                    camera3_capture_request_t *request)
3691{
3692    ALOGV("%s: E", __func__);
3693    QCamera3HardwareInterface *hw =
3694        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3695    if (!hw) {
3696        ALOGE("%s: NULL camera device", __func__);
3697        return -EINVAL;
3698    }
3699
3700    int rc = hw->processCaptureRequest(request);
3701    ALOGV("%s: X", __func__);
3702    return rc;
3703}
3704
3705/*===========================================================================
3706 * FUNCTION   : get_metadata_vendor_tag_ops
3707 *
3708 * DESCRIPTION:
3709 *
3710 * PARAMETERS :
3711 *
3712 *
3713 * RETURN     :
3714 *==========================================================================*/
3715
3716void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3717                const struct camera3_device *device,
3718                vendor_tag_query_ops_t* ops)
3719{
3720    ALOGV("%s: E", __func__);
3721    QCamera3HardwareInterface *hw =
3722        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3723    if (!hw) {
3724        ALOGE("%s: NULL camera device", __func__);
3725        return;
3726    }
3727
3728    hw->getMetadataVendorTagOps(ops);
3729    ALOGV("%s: X", __func__);
3730    return;
3731}
3732
3733/*===========================================================================
3734 * FUNCTION   : dump
3735 *
3736 * DESCRIPTION:
3737 *
3738 * PARAMETERS :
3739 *
3740 *
3741 * RETURN     :
3742 *==========================================================================*/
3743
3744void QCamera3HardwareInterface::dump(
3745                const struct camera3_device *device, int fd)
3746{
3747    ALOGV("%s: E", __func__);
3748    QCamera3HardwareInterface *hw =
3749        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3750    if (!hw) {
3751        ALOGE("%s: NULL camera device", __func__);
3752        return;
3753    }
3754
3755    hw->dump(fd);
3756    ALOGV("%s: X", __func__);
3757    return;
3758}
3759
3760/*===========================================================================
3761 * FUNCTION   : flush
3762 *
3763 * DESCRIPTION:
3764 *
3765 * PARAMETERS :
3766 *
3767 *
3768 * RETURN     :
3769 *==========================================================================*/
3770
3771int QCamera3HardwareInterface::flush(
3772                const struct camera3_device *device)
3773{
3774    int rc;
3775    ALOGV("%s: E", __func__);
3776    QCamera3HardwareInterface *hw =
3777        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3778    if (!hw) {
3779        ALOGE("%s: NULL camera device", __func__);
3780        return -EINVAL;
3781    }
3782
3783    rc = hw->flush();
3784    ALOGV("%s: X", __func__);
3785    return rc;
3786}
3787
3788/*===========================================================================
3789 * FUNCTION   : close_camera_device
3790 *
3791 * DESCRIPTION:
3792 *
3793 * PARAMETERS :
3794 *
3795 *
3796 * RETURN     :
3797 *==========================================================================*/
3798int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3799{
3800    ALOGV("%s: E", __func__);
3801    int ret = NO_ERROR;
3802    QCamera3HardwareInterface *hw =
3803        reinterpret_cast<QCamera3HardwareInterface *>(
3804            reinterpret_cast<camera3_device_t *>(device)->priv);
3805    if (!hw) {
3806        ALOGE("NULL camera device");
3807        return BAD_VALUE;
3808    }
3809    delete hw;
3810
3811    pthread_mutex_lock(&mCameraSessionLock);
3812    mCameraSessionActive = 0;
3813    pthread_mutex_unlock(&mCameraSessionLock);
3814    ALOGV("%s: X", __func__);
3815    return ret;
3816}
3817
3818/*===========================================================================
3819 * FUNCTION   : getWaveletDenoiseProcessPlate
3820 *
3821 * DESCRIPTION: query wavelet denoise process plate
3822 *
3823 * PARAMETERS : None
3824 *
3825 * RETURN     : WNR prcocess plate vlaue
3826 *==========================================================================*/
3827cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3828{
3829    char prop[PROPERTY_VALUE_MAX];
3830    memset(prop, 0, sizeof(prop));
3831    property_get("persist.denoise.process.plates", prop, "0");
3832    int processPlate = atoi(prop);
3833    switch(processPlate) {
3834    case 0:
3835        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3836    case 1:
3837        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3838    case 2:
3839        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3840    case 3:
3841        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3842    default:
3843        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3844    }
3845}
3846
3847/*===========================================================================
3848 * FUNCTION   : needRotationReprocess
3849 *
3850 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3851 *
3852 * PARAMETERS : none
3853 *
3854 * RETURN     : true: needed
3855 *              false: no need
3856 *==========================================================================*/
3857bool QCamera3HardwareInterface::needRotationReprocess()
3858{
3859
3860    if (!mJpegSettings->is_jpeg_format) {
3861        // RAW image, no need to reprocess
3862        return false;
3863    }
3864
3865    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3866        mJpegSettings->jpeg_orientation > 0) {
3867        // current rotation is not zero, and pp has the capability to process rotation
3868        ALOGD("%s: need do reprocess for rotation", __func__);
3869        return true;
3870    }
3871
3872    return false;
3873}
3874
3875/*===========================================================================
3876 * FUNCTION   : needReprocess
3877 *
3878 * DESCRIPTION: if reprocess in needed
3879 *
3880 * PARAMETERS : none
3881 *
3882 * RETURN     : true: needed
3883 *              false: no need
3884 *==========================================================================*/
3885bool QCamera3HardwareInterface::needReprocess()
3886{
3887    if (!mJpegSettings->is_jpeg_format) {
3888        // RAW image, no need to reprocess
3889        return false;
3890    }
3891
3892    if ((mJpegSettings->min_required_pp_mask > 0) ||
3893         isWNREnabled()) {
3894        // TODO: add for ZSL HDR later
3895        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3896        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3897        return true;
3898    }
3899    return needRotationReprocess();
3900}
3901
3902/*===========================================================================
3903 * FUNCTION   : addOnlineReprocChannel
3904 *
3905 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
3906 *              coming from input channel
3907 *
3908 * PARAMETERS :
3909 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3910 *
3911 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3912 *==========================================================================*/
3913QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3914              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3915{
3916    int32_t rc = NO_ERROR;
3917    QCamera3ReprocessChannel *pChannel = NULL;
3918    if (pInputChannel == NULL) {
3919        ALOGE("%s: input channel obj is NULL", __func__);
3920        return NULL;
3921    }
3922
3923    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3924            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
3925    if (NULL == pChannel) {
3926        ALOGE("%s: no mem for reprocess channel", __func__);
3927        return NULL;
3928    }
3929
3930    // Capture channel, only need snapshot and postview streams start together
3931    mm_camera_channel_attr_t attr;
3932    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
3933    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
3934    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
3935    rc = pChannel->initialize();
3936    if (rc != NO_ERROR) {
3937        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
3938        delete pChannel;
3939        return NULL;
3940    }
3941
3942    // pp feature config
3943    cam_pp_feature_config_t pp_config;
3944    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
3945    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
3946        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
3947        pp_config.sharpness = 10;
3948    }
3949
3950    if (isWNREnabled()) {
3951        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
3952        pp_config.denoise2d.denoise_enable = 1;
3953        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
3954    }
3955    if (needRotationReprocess()) {
3956        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
3957        int rotation = mJpegSettings->jpeg_orientation;
3958        if (rotation == 0) {
3959            pp_config.rotation = ROTATE_0;
3960        } else if (rotation == 90) {
3961            pp_config.rotation = ROTATE_90;
3962        } else if (rotation == 180) {
3963            pp_config.rotation = ROTATE_180;
3964        } else if (rotation == 270) {
3965            pp_config.rotation = ROTATE_270;
3966        }
3967    }
3968
3969   rc = pChannel->addReprocStreamsFromSource(pp_config,
3970                                             pInputChannel,
3971                                             mMetadataChannel);
3972
3973    if (rc != NO_ERROR) {
3974        delete pChannel;
3975        return NULL;
3976    }
3977    return pChannel;
3978}
3979
// Maximum number of unmatched frames allowed in a channel's superbuf queue;
// taken from the camera capability's minimum post-processing buffer count.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
3984
// Whether wavelet noise reduction is supported, per the sensor capability
// table for the currently open camera.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
3988
3989}; //end namespace qcamera
3990