QCamera3HWI.cpp revision 13c48518ed9ba7e67d249841de56e51618e090a0
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Convenience accessor for a memory object's mapped pointer at INDEX.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-camera capability tables, indexed by camera id.  Presumably populated
// during module initialization (not visible in this file) — this file only
// reads and annotates them (see the constructor).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Previously applied parameter buffer (file-scope scratch state).
parm_buffer_t *prevSettings;
// Cached static metadata blobs handed to the framework, per camera id.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes session open/close; mCameraSessionActive enforces the
// single-active-session policy checked in openCamera().
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
59
// Translation tables between camera_metadata enum values (framework side)
// and cam_types enum values (mm-camera backend side).  Used by the
// lookup helpers when converting request settings and capture results.

// android.control.effectMode -> backend effect mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// android.control.awbMode -> backend white-balance mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// android.control.sceneMode -> backend scene mode.  Note STEADYPHOTO maps
// to the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// android.control.afMode -> backend focus mode.  AF_MODE_OFF maps to the
// backend's FIXED focus mode (no dedicated "off" value).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// android.control.aeAntibandingMode -> backend antibanding mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// android.control.aeMode -> backend flash mode implied by that AE mode
// (AE on/off both mean flash off; the AUTO_FLASH variants select auto).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// android.flash.mode -> backend flash mode
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// android.statistics.faceDetectMode -> backend face-detect mode
// (SIMPLE is intentionally not advertised here).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Supported JPEG thumbnail sizes as flattened (width, height) pairs;
// the trailing (0, 0) entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
139
// camera3_device_ops vtable handed to the framework (GNU designated
// initializers).  Each entry is a static trampoline that recovers the
// QCamera3HardwareInterface instance from camera3_device_t::priv and
// forwards to the corresponding member function.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};
149
150
151/*===========================================================================
152 * FUNCTION   : QCamera3HardwareInterface
153 *
154 * DESCRIPTION: constructor of QCamera3HardwareInterface
155 *
156 * PARAMETERS :
157 *   @cameraId  : camera ID
158 *
159 * RETURN     : none
160 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the camera3_device_t handed back to the framework; priv
    // lets the static op trampolines recover this instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE: assumes gCamCapability[cameraId] was populated before this
    // constructor runs — a NULL entry here would crash.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request bookkeeping used by process_capture_request / captureResultCb.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily on first use.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL is optional; a missing module is logged but not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
204
205/*===========================================================================
206 * FUNCTION   : ~QCamera3HardwareInterface
207 *
208 * DESCRIPTION: destructor of QCamera3HardwareInterface
209 *
210 * PARAMETERS : none
211 *
212 * RETURN     : none
213 *==========================================================================*/
214QCamera3HardwareInterface::~QCamera3HardwareInterface()
215{
216    ALOGV("%s: E", __func__);
217    /* We need to stop all streams before deleting any stream */
218        /*flush the metadata list*/
219    if (!mStoredMetadataList.empty()) {
220        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
221              m != mStoredMetadataList.end(); m++) {
222            mMetadataChannel->bufDone(m->meta_buf);
223            free(m->meta_buf);
224            m = mStoredMetadataList.erase(m);
225        }
226    }
227    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
228        it != mStreamInfo.end(); it++) {
229        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
230        if (channel)
231           channel->stop();
232    }
233    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
234        it != mStreamInfo.end(); it++) {
235        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
236        if (channel)
237            delete channel;
238        free (*it);
239    }
240
241    mPictureChannel = NULL;
242
243    if (mJpegSettings != NULL) {
244        free(mJpegSettings);
245        mJpegSettings = NULL;
246    }
247
248    /* Clean up all channels */
249    if (mCameraInitialized) {
250        mMetadataChannel->stop();
251        delete mMetadataChannel;
252        mMetadataChannel = NULL;
253        deinitParameters();
254    }
255
256    if (mCameraOpened)
257        closeCamera();
258
259    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
260        if (mDefaultMetadata[i])
261            free_camera_metadata(mDefaultMetadata[i]);
262
263    pthread_cond_destroy(&mRequestCond);
264
265    pthread_mutex_destroy(&mMutex);
266    ALOGV("%s: X", __func__);
267}
268
269/*===========================================================================
270 * FUNCTION   : openCamera
271 *
272 * DESCRIPTION: open camera
273 *
274 * PARAMETERS :
275 *   @hw_device  : double ptr for camera device struct
276 *
277 * RETURN     : int32_t type of status
278 *              NO_ERROR  -- success
279 *              none-zero failure code
280 *==========================================================================*/
281int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
282{
283    int rc = 0;
284    pthread_mutex_lock(&mCameraSessionLock);
285    if (mCameraSessionActive) {
286        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
287        pthread_mutex_unlock(&mCameraSessionLock);
288        return INVALID_OPERATION;
289    }
290
291    if (mCameraOpened) {
292        *hw_device = NULL;
293        return PERMISSION_DENIED;
294    }
295
296    rc = openCamera();
297    if (rc == 0) {
298        *hw_device = &mCameraDevice.common;
299        mCameraSessionActive = 1;
300    } else
301        *hw_device = NULL;
302
303#ifdef HAS_MULTIMEDIA_HINTS
304    if (rc == 0) {
305        if (m_pPowerModule) {
306            if (m_pPowerModule->powerHint) {
307                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
308                        (void *)"state=1");
309            }
310        }
311    }
312#endif
313    pthread_mutex_unlock(&mCameraSessionLock);
314    return rc;
315}
316
317/*===========================================================================
318 * FUNCTION   : openCamera
319 *
320 * DESCRIPTION: open camera
321 *
322 * PARAMETERS : none
323 *
324 * RETURN     : int32_t type of status
325 *              NO_ERROR  -- success
326 *              none-zero failure code
327 *==========================================================================*/
328int QCamera3HardwareInterface::openCamera()
329{
330    if (mCameraHandle) {
331        ALOGE("Failure: Camera already opened");
332        return ALREADY_EXISTS;
333    }
334    mCameraHandle = camera_open(mCameraId);
335    if (!mCameraHandle) {
336        ALOGE("camera_open failed.");
337        return UNKNOWN_ERROR;
338    }
339
340    mCameraOpened = true;
341
342    return NO_ERROR;
343}
344
345/*===========================================================================
346 * FUNCTION   : closeCamera
347 *
348 * DESCRIPTION: close camera
349 *
350 * PARAMETERS : none
351 *
352 * RETURN     : int32_t type of status
353 *              NO_ERROR  -- success
354 *              none-zero failure code
355 *==========================================================================*/
356int QCamera3HardwareInterface::closeCamera()
357{
358    int rc = NO_ERROR;
359
360    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
361    mCameraHandle = NULL;
362    mCameraOpened = false;
363
364#ifdef HAS_MULTIMEDIA_HINTS
365    if (rc == NO_ERROR) {
366        if (m_pPowerModule) {
367            if (m_pPowerModule->powerHint) {
368                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
369                        (void *)"state=0");
370            }
371        }
372    }
373#endif
374
375    return rc;
376}
377
378/*===========================================================================
379 * FUNCTION   : initialize
380 *
381 * DESCRIPTION: Initialize frameworks callback functions
382 *
383 * PARAMETERS :
384 *   @callback_ops : callback function to frameworks
385 *
386 * RETURN     :
387 *
388 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    // Set up the parameter heap/buffer shared with the backend.
    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
       goto err1;
    }
    //Create metadata channel and initialize it
    // NOTE(review): with exception-enabled new this NULL check is dead
    // code (operator new throws); harmless either way.
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    // NOTE(review): set after the unlock — benign only if no other thread
    // reads mCameraInitialized concurrently; confirm callers.
    mCameraInitialized = true;
    return 0;

// Error unwind: undo work in reverse order of construction.
err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
431
432/*===========================================================================
433 * FUNCTION   : configureStreams
434 *
435 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
436 *              and output streams.
437 *
438 * PARAMETERS :
439 *   @stream_list : streams to be configured
440 *
441 * RETURN     :
442 *
443 *==========================================================================*/
444int QCamera3HardwareInterface::configureStreams(
445        camera3_stream_configuration_t *streamList)
446{
447    int rc = 0;
448    mIsZslMode = false;
449    pthread_mutex_lock(&mMutex);
450    // Sanity check stream_list
451    if (streamList == NULL) {
452        ALOGE("%s: NULL stream configuration", __func__);
453        pthread_mutex_unlock(&mMutex);
454        return BAD_VALUE;
455    }
456
457    if (streamList->streams == NULL) {
458        ALOGE("%s: NULL stream list", __func__);
459        pthread_mutex_unlock(&mMutex);
460        return BAD_VALUE;
461    }
462
463    if (streamList->num_streams < 1) {
464        ALOGE("%s: Bad number of streams requested: %d", __func__,
465                streamList->num_streams);
466        pthread_mutex_unlock(&mMutex);
467        return BAD_VALUE;
468    }
469
470    camera3_stream_t *inputStream = NULL;
471    camera3_stream_t *jpegStream = NULL;
472    /* first invalidate all the steams in the mStreamList
473     * if they appear again, they will be validated */
474    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
475            it != mStreamInfo.end(); it++) {
476        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
477        channel->stop();
478        (*it)->status = INVALID;
479    }
480
481    for (size_t i = 0; i < streamList->num_streams; i++) {
482        camera3_stream_t *newStream = streamList->streams[i];
483        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
484                __func__, newStream->stream_type, newStream->format,
485                 newStream->width, newStream->height);
486        //if the stream is in the mStreamList validate it
487        bool stream_exists = false;
488        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
489                it != mStreamInfo.end(); it++) {
490            if ((*it)->stream == newStream) {
491                QCamera3Channel *channel =
492                    (QCamera3Channel*)(*it)->stream->priv;
493                stream_exists = true;
494                (*it)->status = RECONFIGURE;
495                /*delete the channel object associated with the stream because
496                  we need to reconfigure*/
497                delete channel;
498                (*it)->stream->priv = NULL;
499            }
500        }
501        if (!stream_exists) {
502            //new stream
503            stream_info_t* stream_info;
504            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
505            stream_info->stream = newStream;
506            stream_info->status = VALID;
507            stream_info->registered = 0;
508            mStreamInfo.push_back(stream_info);
509        }
510        if (newStream->stream_type == CAMERA3_STREAM_INPUT
511                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
512            if (inputStream != NULL) {
513                ALOGE("%s: Multiple input streams requested!", __func__);
514                pthread_mutex_unlock(&mMutex);
515                return BAD_VALUE;
516            }
517            inputStream = newStream;
518        }
519        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
520            jpegStream = newStream;
521        }
522    }
523    mInputStream = inputStream;
524
525    /*clean up invalid streams*/
526    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
527            it != mStreamInfo.end();) {
528        if(((*it)->status) == INVALID){
529            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
530            delete channel;
531            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
532            free(*it);
533            it = mStreamInfo.erase(it);
534        } else {
535            it++;
536        }
537    }
538
539    //mMetadataChannel->stop();
540
541    /* Allocate channel objects for the requested streams */
542    for (size_t i = 0; i < streamList->num_streams; i++) {
543        camera3_stream_t *newStream = streamList->streams[i];
544        if (newStream->priv == NULL) {
545            //New stream, construct channel
546            switch (newStream->stream_type) {
547            case CAMERA3_STREAM_INPUT:
548                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
549                break;
550            case CAMERA3_STREAM_BIDIRECTIONAL:
551                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
552                    GRALLOC_USAGE_HW_CAMERA_WRITE;
553                break;
554            case CAMERA3_STREAM_OUTPUT:
555                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
556                break;
557            default:
558                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
559                break;
560            }
561
562            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
563                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
564                QCamera3Channel *channel;
565                switch (newStream->format) {
566                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
567                case HAL_PIXEL_FORMAT_YCbCr_420_888:
568                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
569                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
570                        jpegStream) {
571                        uint32_t width = jpegStream->width;
572                        uint32_t height = jpegStream->height;
573                        mIsZslMode = true;
574                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
575                            mCameraHandle->ops, captureResultCb,
576                            &gCamCapability[mCameraId]->padding_info, this, newStream,
577                            width, height);
578                    } else
579                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
580                            mCameraHandle->ops, captureResultCb,
581                            &gCamCapability[mCameraId]->padding_info, this, newStream);
582                    if (channel == NULL) {
583                        ALOGE("%s: allocation of channel failed", __func__);
584                        pthread_mutex_unlock(&mMutex);
585                        return -ENOMEM;
586                    }
587
588                    newStream->priv = channel;
589                    break;
590                case HAL_PIXEL_FORMAT_BLOB:
591                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
592                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
593                            mCameraHandle->ops, captureResultCb,
594                            &gCamCapability[mCameraId]->padding_info, this, newStream);
595                    if (mPictureChannel == NULL) {
596                        ALOGE("%s: allocation of channel failed", __func__);
597                        pthread_mutex_unlock(&mMutex);
598                        return -ENOMEM;
599                    }
600                    newStream->priv = (QCamera3Channel*)mPictureChannel;
601                    break;
602
603                //TODO: Add support for app consumed format?
604                default:
605                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
606                    break;
607                }
608            }
609        } else {
610            // Channel already exists for this stream
611            // Do nothing for now
612        }
613    }
614    /*For the streams to be reconfigured we need to register the buffers
615      since the framework wont*/
616    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
617            it != mStreamInfo.end(); it++) {
618        if ((*it)->status == RECONFIGURE) {
619            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
620            /*only register buffers for streams that have already been
621              registered*/
622            if ((*it)->registered) {
623                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
624                        (*it)->buffer_set.buffers);
625                if (rc != NO_ERROR) {
626                    ALOGE("%s: Failed to register the buffers of old stream,\
627                            rc = %d", __func__, rc);
628                }
629                ALOGV("%s: channel %p has %d buffers",
630                        __func__, channel, (*it)->buffer_set.num_buffers);
631            }
632        }
633
634        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
635        if (index == NAME_NOT_FOUND) {
636            mPendingBuffersMap.add((*it)->stream, 0);
637        } else {
638            mPendingBuffersMap.editValueAt(index) = 0;
639        }
640    }
641
642    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
643    mPendingRequestsList.clear();
644
645    /*flush the metadata list*/
646    if (!mStoredMetadataList.empty()) {
647        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
648              m != mStoredMetadataList.end(); m++) {
649            mMetadataChannel->bufDone(m->meta_buf);
650            free(m->meta_buf);
651            m = mStoredMetadataList.erase(m);
652        }
653    }
654
655    //settings/parameters don't carry over for new configureStreams
656    memset(mParameters, 0, sizeof(parm_buffer_t));
657    mFirstRequest = true;
658
659    //Get min frame duration for this streams configuration
660    deriveMinFrameDuration();
661
662    pthread_mutex_unlock(&mMutex);
663    return rc;
664}
665
666/*===========================================================================
667 * FUNCTION   : validateCaptureRequest
668 *
669 * DESCRIPTION: validate a capture request from camera service
670 *
671 * PARAMETERS :
672 *   @request : request from framework to process
673 *
674 * RETURN     :
675 *
676 *==========================================================================*/
677int QCamera3HardwareInterface::validateCaptureRequest(
678                    camera3_capture_request_t *request)
679{
680    ssize_t idx = 0;
681    const camera3_stream_buffer_t *b;
682    CameraMetadata meta;
683
684    /* Sanity check the request */
685    if (request == NULL) {
686        ALOGE("%s: NULL capture request", __func__);
687        return BAD_VALUE;
688    }
689
690    uint32_t frameNumber = request->frame_number;
691    if (request->input_buffer != NULL &&
692            request->input_buffer->stream != mInputStream) {
693        ALOGE("%s: Request %d: Input buffer not from input stream!",
694                __FUNCTION__, frameNumber);
695        return BAD_VALUE;
696    }
697    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
698        ALOGE("%s: Request %d: No output buffers provided!",
699                __FUNCTION__, frameNumber);
700        return BAD_VALUE;
701    }
702    if (request->input_buffer != NULL) {
703        b = request->input_buffer;
704        QCamera3Channel *channel =
705            static_cast<QCamera3Channel*>(b->stream->priv);
706        if (channel == NULL) {
707            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
708                    __func__, frameNumber, idx);
709            return BAD_VALUE;
710        }
711        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
712            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
713                    __func__, frameNumber, idx);
714            return BAD_VALUE;
715        }
716        if (b->release_fence != -1) {
717            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
718                    __func__, frameNumber, idx);
719            return BAD_VALUE;
720        }
721        if (b->buffer == NULL) {
722            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
723                    __func__, frameNumber, idx);
724            return BAD_VALUE;
725        }
726    }
727
728    // Validate all buffers
729    b = request->output_buffers;
730    do {
731        QCamera3Channel *channel =
732                static_cast<QCamera3Channel*>(b->stream->priv);
733        if (channel == NULL) {
734            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
735                    __func__, frameNumber, idx);
736            return BAD_VALUE;
737        }
738        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
739            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
740                    __func__, frameNumber, idx);
741            return BAD_VALUE;
742        }
743        if (b->release_fence != -1) {
744            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
745                    __func__, frameNumber, idx);
746            return BAD_VALUE;
747        }
748        if (b->buffer == NULL) {
749            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
750                    __func__, frameNumber, idx);
751            return BAD_VALUE;
752        }
753        idx++;
754        b = request->output_buffers + idx;
755    } while (idx < (ssize_t)request->num_output_buffers);
756
757    return NO_ERROR;
758}
759
760/*===========================================================================
761 * FUNCTION   : deriveMinFrameDuration
762 *
763 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
764 *              on currently configured streams.
765 *
766 * PARAMETERS : NONE
767 *
768 * RETURN     : NONE
769 *
770 *==========================================================================*/
771void QCamera3HardwareInterface::deriveMinFrameDuration()
772{
773    int32_t maxJpegDimension, maxProcessedDimension;
774
775    maxJpegDimension = 0;
776    maxProcessedDimension = 0;
777
778    // Figure out maximum jpeg, processed, and raw dimensions
779    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
780        it != mStreamInfo.end(); it++) {
781
782        // Input stream doesn't have valid stream_type
783        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
784            continue;
785
786        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
787        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
788            if (dimension > maxJpegDimension)
789                maxJpegDimension = dimension;
790        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
791            if (dimension > maxProcessedDimension)
792                maxProcessedDimension = dimension;
793        }
794    }
795
796    //Assume all jpeg dimensions are in processed dimensions.
797    if (maxJpegDimension > maxProcessedDimension)
798        maxProcessedDimension = maxJpegDimension;
799
800    //Find minimum durations for processed, jpeg, and raw
801    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
802    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
803        if (maxProcessedDimension ==
804            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
805            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
806            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
807            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
808            break;
809        }
810    }
811}
812
813/*===========================================================================
814 * FUNCTION   : getMinFrameDuration
815 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
822 *
823 *==========================================================================*/
824int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
825{
826    bool hasJpegStream = false;
827    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
828        const camera3_stream_t *stream = request->output_buffers[i].stream;
829        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
830            hasJpegStream = true;
831    }
832
833    if (!hasJpegStream)
834        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
835    else
836        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
837}
838
839/*===========================================================================
840 * FUNCTION   : registerStreamBuffers
841 *
842 * DESCRIPTION: Register buffers for a given stream with the HAL device.
843 *
844 * PARAMETERS :
 *   @buffer_set : stream and the set of buffers to register for it
846 *
847 * RETURN     :
848 *
849 *==========================================================================*/
850int QCamera3HardwareInterface::registerStreamBuffers(
851        const camera3_stream_buffer_set_t *buffer_set)
852{
853    int rc = 0;
854
855    pthread_mutex_lock(&mMutex);
856
857    if (buffer_set == NULL) {
858        ALOGE("%s: Invalid buffer_set parameter.", __func__);
859        pthread_mutex_unlock(&mMutex);
860        return -EINVAL;
861    }
862    if (buffer_set->stream == NULL) {
863        ALOGE("%s: Invalid stream parameter.", __func__);
864        pthread_mutex_unlock(&mMutex);
865        return -EINVAL;
866    }
867    if (buffer_set->num_buffers < 1) {
868        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
869        pthread_mutex_unlock(&mMutex);
870        return -EINVAL;
871    }
872    if (buffer_set->buffers == NULL) {
873        ALOGE("%s: Invalid buffers parameter.", __func__);
874        pthread_mutex_unlock(&mMutex);
875        return -EINVAL;
876    }
877
878    camera3_stream_t *stream = buffer_set->stream;
879    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
880
881    //set the buffer_set in the mStreamInfo array
882    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
883            it != mStreamInfo.end(); it++) {
884        if ((*it)->stream == stream) {
885            uint32_t numBuffers = buffer_set->num_buffers;
886            (*it)->buffer_set.stream = buffer_set->stream;
887            (*it)->buffer_set.num_buffers = numBuffers;
888            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
889            if ((*it)->buffer_set.buffers == NULL) {
890                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
891                pthread_mutex_unlock(&mMutex);
892                return -ENOMEM;
893            }
894            for (size_t j = 0; j < numBuffers; j++){
895                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
896            }
897            (*it)->registered = 1;
898        }
899    }
900    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
901    if (rc < 0) {
902        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
903        pthread_mutex_unlock(&mMutex);
904        return -ENODEV;
905    }
906
907    pthread_mutex_unlock(&mMutex);
908    return NO_ERROR;
909}
910
911/*===========================================================================
912 * FUNCTION   : processCaptureRequest
913 *
914 * DESCRIPTION: process a capture request from camera service
915 *
916 * PARAMETERS :
917 *   @request : request from framework to process
918 *
919 * RETURN     :
920 *
921 *==========================================================================*/
922int QCamera3HardwareInterface::processCaptureRequest(
923                    camera3_capture_request_t *request)
924{
925    int rc = NO_ERROR;
926    int32_t request_id;
927    CameraMetadata meta;
928    MetadataBufferInfo reproc_meta;
929    int queueMetadata = 0;
930
931    pthread_mutex_lock(&mMutex);
932
933    // For first capture request, stream on all streams
934    if (mFirstRequest) {
935        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
936            it != mStreamInfo.end(); it++) {
937            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
938            channel->start();
939        }
940    }
941
942    rc = validateCaptureRequest(request);
943    if (rc != NO_ERROR) {
944        ALOGE("%s: incoming request is not valid", __func__);
945        pthread_mutex_unlock(&mMutex);
946        return rc;
947    }
948
949    uint32_t frameNumber = request->frame_number;
950    uint32_t streamTypeMask = 0;
951
952    meta = request->settings;
953    if (meta.exists(ANDROID_REQUEST_ID)) {
954        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
955        mCurrentRequestId = request_id;
956        ALOGV("%s: Received request with id: %d",__func__, request_id);
957    } else if (mFirstRequest || mCurrentRequestId == -1){
958        ALOGE("%s: Unable to find request id field, \
959                & no previous id available", __func__);
960        return NAME_NOT_FOUND;
961    } else {
962        ALOGV("%s: Re-using old request id", __func__);
963        request_id = mCurrentRequestId;
964    }
965
966    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
967                                    __func__, __LINE__,
968                                    request->num_output_buffers,
969                                    request->input_buffer,
970                                    frameNumber);
971    // Acquire all request buffers first
972    int blob_request = 0;
973    for (size_t i = 0; i < request->num_output_buffers; i++) {
974        const camera3_stream_buffer_t& output = request->output_buffers[i];
975        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
976        sp<Fence> acquireFence = new Fence(output.acquire_fence);
977
978        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
979        //Call function to store local copy of jpeg data for encode params.
980            blob_request = 1;
981            rc = getJpegSettings(request->settings);
982            if (rc < 0) {
983                ALOGE("%s: failed to get jpeg parameters", __func__);
984                pthread_mutex_unlock(&mMutex);
985                return rc;
986            }
987        }
988
989        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
990        if (rc != OK) {
991            ALOGE("%s: fence wait failed %d", __func__, rc);
992            pthread_mutex_unlock(&mMutex);
993            return rc;
994        }
995        streamTypeMask |= channel->getStreamTypeMask();
996    }
997
998    rc = setFrameParameters(request, streamTypeMask);
999    if (rc < 0) {
1000        ALOGE("%s: fail to set frame parameters", __func__);
1001        pthread_mutex_unlock(&mMutex);
1002        return rc;
1003    }
1004
1005    /* Update pending request list and pending buffers map */
1006    PendingRequestInfo pendingRequest;
1007    pendingRequest.frame_number = frameNumber;
1008    pendingRequest.num_buffers = request->num_output_buffers;
1009    pendingRequest.request_id = request_id;
1010    pendingRequest.blob_request = blob_request;
1011    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1012
1013    for (size_t i = 0; i < request->num_output_buffers; i++) {
1014        RequestedBufferInfo requestedBuf;
1015        requestedBuf.stream = request->output_buffers[i].stream;
1016        requestedBuf.buffer = NULL;
1017        pendingRequest.buffers.push_back(requestedBuf);
1018
1019        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1020    }
1021    mPendingRequestsList.push_back(pendingRequest);
1022
1023    // Notify metadata channel we receive a request
1024    mMetadataChannel->request(NULL, frameNumber);
1025
1026    // Call request on other streams
1027    for (size_t i = 0; i < request->num_output_buffers; i++) {
1028        const camera3_stream_buffer_t& output = request->output_buffers[i];
1029        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1030        mm_camera_buf_def_t *pInputBuffer = NULL;
1031
1032        if (channel == NULL) {
1033            ALOGE("%s: invalid channel pointer for stream", __func__);
1034            continue;
1035        }
1036
1037        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1038            QCamera3RegularChannel* inputChannel = NULL;
1039            if(request->input_buffer != NULL){
1040                //Try to get the internal format
1041                inputChannel = (QCamera3RegularChannel*)
1042                    request->input_buffer->stream->priv;
1043                if(inputChannel == NULL ){
1044                    ALOGE("%s: failed to get input channel handle", __func__);
1045                } else {
1046                    pInputBuffer =
1047                        inputChannel->getInternalFormatBuffer(
1048                                request->input_buffer->buffer);
1049                    ALOGD("%s: Input buffer dump",__func__);
1050                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1051                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1052                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1053                    //TODO: need to get corresponding metadata and send it to pproc
1054                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1055                         m != mStoredMetadataList.end(); m++) {
1056                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1057                            reproc_meta.meta_buf = m->meta_buf;
1058                            m = mStoredMetadataList.erase(m);
1059                            queueMetadata = 1;
1060                            break;
1061                        }
1062                    }
1063                }
1064            }
1065            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1066                            pInputBuffer,(QCamera3Channel*)inputChannel);
1067            if (queueMetadata) {
1068                mPictureChannel->queueMetadata(reproc_meta.meta_buf);
1069            }
1070        } else {
1071            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1072                __LINE__, output.buffer, frameNumber);
1073            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1074                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1075                     m != mStoredMetadataList.end(); m++) {
1076                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1077                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1078                            mMetadataChannel->bufDone(m->meta_buf);
1079                            free(m->meta_buf);
1080                            m = mStoredMetadataList.erase(m);
1081                            break;
1082                        }
1083                   }
1084                }
1085            }
1086            rc = channel->request(output.buffer, frameNumber);
1087        }
1088        if (rc < 0)
1089            ALOGE("%s: request failed", __func__);
1090    }
1091
1092    mFirstRequest = false;
1093
1094    //Block on conditional variable
1095    mPendingRequest = 1;
1096    while (mPendingRequest == 1) {
1097        pthread_cond_wait(&mRequestCond, &mMutex);
1098    }
1099
1100    pthread_mutex_unlock(&mMutex);
1101    return rc;
1102}
1103
1104/*===========================================================================
1105 * FUNCTION   : getMetadataVendorTagOps
1106 *
1107 * DESCRIPTION:
1108 *
1109 * PARAMETERS :
1110 *
1111 *
1112 * RETURN     :
1113 *==========================================================================*/
1114void QCamera3HardwareInterface::getMetadataVendorTagOps(
1115                    vendor_tag_query_ops_t* /*ops*/)
1116{
1117    /* Enable locks when we eventually add Vendor Tags */
1118    /*
1119    pthread_mutex_lock(&mMutex);
1120
1121    pthread_mutex_unlock(&mMutex);
1122    */
1123    return;
1124}
1125
1126/*===========================================================================
1127 * FUNCTION   : dump
1128 *
1129 * DESCRIPTION:
1130 *
1131 * PARAMETERS :
1132 *
1133 *
1134 * RETURN     :
1135 *==========================================================================*/
1136void QCamera3HardwareInterface::dump(int /*fd*/)
1137{
1138    /*Enable lock when we implement this function*/
1139    /*
1140    pthread_mutex_lock(&mMutex);
1141
1142    pthread_mutex_unlock(&mMutex);
1143    */
1144    return;
1145}
1146
1147
1148/*===========================================================================
1149 * FUNCTION   : captureResultCb
1150 *
1151 * DESCRIPTION: Callback handler for all capture result
1152 *              (streams, as well as metadata)
1153 *
1154 * PARAMETERS :
1155 *   @metadata : metadata information
1156 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1157 *               NULL if metadata.
1158 *
1159 * RETURN     : NONE
1160 *==========================================================================*/
1161void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1162                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1163{
1164    pthread_mutex_lock(&mMutex);
1165
1166    if (metadata_buf) {
1167        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1168        int32_t frame_number_valid = *(int32_t *)
1169            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1170        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1171            CAM_INTF_META_PENDING_REQUESTS, metadata);
1172        uint32_t frame_number = *(uint32_t *)
1173            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1174        const struct timeval *tv = (const struct timeval *)
1175            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1176        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1177            tv->tv_usec * NSEC_PER_USEC;
1178
1179        if (!frame_number_valid) {
1180            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1181            mMetadataChannel->bufDone(metadata_buf);
1182            goto done_metadata;
1183        }
1184        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1185                frame_number, capture_time);
1186
1187        // Go through the pending requests info and send shutter/results to frameworks
1188        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1189                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1190            camera3_capture_result_t result;
1191            camera3_notify_msg_t notify_msg;
1192            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1193
1194            // Flush out all entries with less or equal frame numbers.
1195
1196            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1197            //Right now it's the same as metadata timestamp
1198
1199            //TODO: When there is metadata drop, how do we derive the timestamp of
1200            //dropped frames? For now, we fake the dropped timestamp by substracting
1201            //from the reported timestamp
1202            nsecs_t current_capture_time = capture_time -
1203                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1204
1205            // Send shutter notify to frameworks
1206            notify_msg.type = CAMERA3_MSG_SHUTTER;
1207            notify_msg.message.shutter.frame_number = i->frame_number;
1208            notify_msg.message.shutter.timestamp = current_capture_time;
1209            mCallbackOps->notify(mCallbackOps, &notify_msg);
1210            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1211                    i->frame_number, capture_time);
1212
1213            // Send empty metadata with already filled buffers for dropped metadata
1214            // and send valid metadata with already filled buffers for current metadata
1215            if (i->frame_number < frame_number) {
1216                CameraMetadata dummyMetadata;
1217                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1218                        &current_capture_time, 1);
1219                dummyMetadata.update(ANDROID_REQUEST_ID,
1220                        &(i->request_id), 1);
1221                result.result = dummyMetadata.release();
1222            } else {
1223                result.result = translateCbMetadataToResultMetadata(metadata,
1224                        current_capture_time, i->request_id);
1225                if (mIsZslMode) {
1226                   int found_metadata = 0;
1227                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1228                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1229                        j != i->buffers.end(); j++) {
1230                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1231                         //check if corresp. zsl already exists in the stored metadata list
1232                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1233                               m != mStoredMetadataList.begin(); m++) {
1234                            if (m->frame_number == frame_number) {
1235                               m->meta_buf = metadata_buf;
1236                               found_metadata = 1;
1237                               break;
1238                            }
1239                         }
1240                         if (!found_metadata) {
1241                            MetadataBufferInfo store_meta_info;
1242                            store_meta_info.meta_buf = metadata_buf;
1243                            store_meta_info.frame_number = frame_number;
1244                            mStoredMetadataList.push_back(store_meta_info);
1245                            found_metadata = 1;
1246                         }
1247                      }
1248                   }
1249                   if (!found_metadata) {
1250                       if (!i->input_buffer_present && i->blob_request) {
1251                          //livesnapshot or fallback non-zsl snapshot case
1252                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1253                                j != i->buffers.end(); j++){
1254                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1255                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1256                                 mPictureChannel->queueMetadata(metadata_buf);
1257                                 break;
1258                              }
1259                         }
1260                       } else {
1261                            //return the metadata immediately
1262                            mMetadataChannel->bufDone(metadata_buf);
1263                            free(metadata_buf);
1264                       }
1265                   }
1266               } else if (!mIsZslMode && i->blob_request) {
1267                   //If it is a blob request then send the metadata to the picture channel
1268                   mPictureChannel->queueMetadata(metadata_buf);
1269               } else {
1270                   // Return metadata buffer
1271                   mMetadataChannel->bufDone(metadata_buf);
1272                   free(metadata_buf);
1273               }
1274
1275            }
1276            if (!result.result) {
1277                ALOGE("%s: metadata is NULL", __func__);
1278            }
1279            result.frame_number = i->frame_number;
1280            result.num_output_buffers = 0;
1281            result.output_buffers = NULL;
1282            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1283                    j != i->buffers.end(); j++) {
1284                if (j->buffer) {
1285                    result.num_output_buffers++;
1286                }
1287            }
1288
1289            if (result.num_output_buffers > 0) {
1290                camera3_stream_buffer_t *result_buffers =
1291                    new camera3_stream_buffer_t[result.num_output_buffers];
1292                if (!result_buffers) {
1293                    ALOGE("%s: Fatal error: out of memory", __func__);
1294                }
1295                size_t result_buffers_idx = 0;
1296                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1297                        j != i->buffers.end(); j++) {
1298                    if (j->buffer) {
1299                        result_buffers[result_buffers_idx++] = *(j->buffer);
1300                        free(j->buffer);
1301                        j->buffer = NULL;
1302                        mPendingBuffersMap.editValueFor(j->stream)--;
1303                    }
1304                }
1305                result.output_buffers = result_buffers;
1306
1307                mCallbackOps->process_capture_result(mCallbackOps, &result);
1308                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1309                        __func__, result.frame_number, current_capture_time);
1310                free_camera_metadata((camera_metadata_t *)result.result);
1311                delete[] result_buffers;
1312            } else {
1313                mCallbackOps->process_capture_result(mCallbackOps, &result);
1314                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1315                        __func__, result.frame_number, current_capture_time);
1316                free_camera_metadata((camera_metadata_t *)result.result);
1317            }
1318            // erase the element from the list
1319            i = mPendingRequestsList.erase(i);
1320        }
1321
1322
1323done_metadata:
1324        bool max_buffers_dequeued = false;
1325        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1326            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1327            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1328            if (queued_buffers == stream->max_buffers) {
1329                max_buffers_dequeued = true;
1330                break;
1331            }
1332        }
1333        if (!max_buffers_dequeued && !pending_requests) {
1334            // Unblock process_capture_request
1335            mPendingRequest = 0;
1336            pthread_cond_signal(&mRequestCond);
1337        }
1338    } else {
1339        // If the frame number doesn't exist in the pending request list,
1340        // directly send the buffer to the frameworks, and update pending buffers map
1341        // Otherwise, book-keep the buffer.
1342        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1343        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1344            i++;
1345        }
1346        if (i == mPendingRequestsList.end()) {
1347            // Verify all pending requests frame_numbers are greater
1348            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1349                    j != mPendingRequestsList.end(); j++) {
1350                if (j->frame_number < frame_number) {
1351                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1352                            __func__, j->frame_number, frame_number);
1353                }
1354            }
1355            camera3_capture_result_t result;
1356            result.result = NULL;
1357            result.frame_number = frame_number;
1358            result.num_output_buffers = 1;
1359            result.output_buffers = buffer;
1360            ALOGV("%s: result frame_number = %d, buffer = %p",
1361                    __func__, frame_number, buffer);
1362            mPendingBuffersMap.editValueFor(buffer->stream)--;
1363            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1364                int found = 0;
1365                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1366                      k != mStoredMetadataList.end(); k++) {
1367                    if (k->frame_number == frame_number) {
1368                        k->zsl_buf_hdl = buffer->buffer;
1369                        found = 1;
1370                        break;
1371                    }
1372                }
1373                if (!found) {
1374                   MetadataBufferInfo meta_info;
1375                   meta_info.frame_number = frame_number;
1376                   meta_info.zsl_buf_hdl = buffer->buffer;
1377                   mStoredMetadataList.push_back(meta_info);
1378                }
1379            }
1380            mCallbackOps->process_capture_result(mCallbackOps, &result);
1381        } else {
1382            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1383                    j != i->buffers.end(); j++) {
1384                if (j->stream == buffer->stream) {
1385                    if (j->buffer != NULL) {
1386                        ALOGE("%s: Error: buffer is already set", __func__);
1387                    } else {
1388                        j->buffer = (camera3_stream_buffer_t *)malloc(
1389                                sizeof(camera3_stream_buffer_t));
1390                        *(j->buffer) = *buffer;
1391                        ALOGV("%s: cache buffer %p at result frame_number %d",
1392                                __func__, buffer, frame_number);
1393                    }
1394                }
1395            }
1396        }
1397    }
1398    pthread_mutex_unlock(&mMutex);
1399    return;
1400}
1401
1402/*===========================================================================
1403 * FUNCTION   : translateCbMetadataToResultMetadata
1404 *
1405 * DESCRIPTION:
1406 *
1407 * PARAMETERS :
1408 *   @metadata : metadata information from callback
1409 *
1410 * RETURN     : camera_metadata_t*
1411 *              metadata in a format specified by fwk
1412 *==========================================================================*/
1413camera_metadata_t*
1414QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1415                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1416                                 int32_t request_id)
1417{
1418    CameraMetadata camMetadata;
1419    camera_metadata_t* resultMetadata;
1420
1421    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1422    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1423
1424    /*CAM_INTF_META_HISTOGRAM - TODO*/
1425    /*cam_hist_stats_t  *histogram =
1426      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1427      metadata);*/
1428
1429    /*face detection*/
1430    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1431        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1432    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1433    int32_t faceIds[numFaces];
1434    uint8_t faceScores[numFaces];
1435    int32_t faceRectangles[numFaces * 4];
1436    int32_t faceLandmarks[numFaces * 6];
1437    int j = 0, k = 0;
1438    for (int i = 0; i < numFaces; i++) {
1439        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1440        faceScores[i] = faceDetectionInfo->faces[i].score;
1441        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1442                faceRectangles+j, -1);
1443        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1444        j+= 4;
1445        k+= 6;
1446    }
1447    if (numFaces > 0) {
1448        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1449        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1450        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1451            faceRectangles, numFaces*4);
1452        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1453            faceLandmarks, numFaces*6);
1454    }
1455
1456    uint8_t  *color_correct_mode =
1457        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1458    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1459
1460    int32_t  *ae_precapture_id =
1461        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1462    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1463
1464    /*aec regions*/
1465    cam_area_t  *hAeRegions =
1466        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1467    int32_t aeRegions[5];
1468    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1469    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1470
1471    uint8_t *ae_state =
1472            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1473    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1474
1475    uint8_t  *focusMode =
1476        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1477    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1478
1479    /*af regions*/
1480    cam_area_t  *hAfRegions =
1481        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1482    int32_t afRegions[5];
1483    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1484    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1485
1486    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1487    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1488
1489    int32_t  *afTriggerId =
1490        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1491    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1492
1493    uint8_t  *whiteBalance =
1494        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1495    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1496
1497    /*awb regions*/
1498    cam_area_t  *hAwbRegions =
1499        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1500    int32_t awbRegions[5];
1501    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1502    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1503
1504    uint8_t  *whiteBalanceState =
1505        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1506    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1507
1508    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1509    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1510
1511    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1512    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1513
1514    uint8_t  *flashPower =
1515        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1516    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1517
1518    int64_t  *flashFiringTime =
1519        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1520    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1521
1522    /*int32_t  *ledMode =
1523      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1524      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1525
1526    uint8_t  *flashState =
1527        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1528    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1529
1530    uint8_t  *hotPixelMode =
1531        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1532    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1533
1534    float  *lensAperture =
1535        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1536    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1537
1538    float  *filterDensity =
1539        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1540    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1541
1542    float  *focalLength =
1543        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1544    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1545
1546    float  *focusDistance =
1547        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1548    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1549
1550    float  *focusRange =
1551        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1552    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1553
1554    uint8_t  *opticalStab =
1555        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1556    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1557
1558    /*int32_t  *focusState =
1559      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1560      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1561
1562    uint8_t  *noiseRedMode =
1563        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1564    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1565
1566    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1567
1568    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1569        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1570    int32_t scalerCropRegion[4];
1571    scalerCropRegion[0] = hScalerCropRegion->left;
1572    scalerCropRegion[1] = hScalerCropRegion->top;
1573    scalerCropRegion[2] = hScalerCropRegion->width;
1574    scalerCropRegion[3] = hScalerCropRegion->height;
1575    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1576
1577    int64_t  *sensorExpTime =
1578        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1579    mMetadataResponse.exposure_time = *sensorExpTime;
1580    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1581    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1582
1583    int64_t  *sensorFameDuration =
1584        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1585    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1586    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1587
1588    int32_t  *sensorSensitivity =
1589        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1590    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1591    mMetadataResponse.iso_speed = *sensorSensitivity;
1592    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1593
1594    uint8_t  *shadingMode =
1595        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1596    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1597
1598    uint8_t  *faceDetectMode =
1599        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1600    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1601        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1602        *faceDetectMode);
1603    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1604
1605    uint8_t  *histogramMode =
1606        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1607    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1608
1609    uint8_t  *sharpnessMapMode =
1610        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1611    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1612            sharpnessMapMode, 1);
1613
1614    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1615    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1616        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1617    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1618            (int32_t*)sharpnessMap->sharpness,
1619            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1620
1621    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1622        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1623    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1624    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1625    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1626                       (float*)lensShadingMap->lens_shading,
1627                       4*map_width*map_height);
1628
1629    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1630        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1631    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1632
1633    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1634        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1635    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1636                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1637
1638    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1639        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1640    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1641                       predColorCorrectionGains->gains, 4);
1642
1643    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1644        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1645    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1646                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1647
1648    uint8_t *blackLevelLock = (uint8_t*)
1649        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1650    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1651
1652    uint8_t *sceneFlicker = (uint8_t*)
1653        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1654    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1655
1656
1657    resultMetadata = camMetadata.release();
1658    return resultMetadata;
1659}
1660
1661/*===========================================================================
1662 * FUNCTION   : convertToRegions
1663 *
1664 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1665 *
1666 * PARAMETERS :
1667 *   @rect   : cam_rect_t struct to convert
1668 *   @region : int32_t destination array
1669 *   @weight : if we are converting from cam_area_t, weight is valid
1670 *             else weight = -1
1671 *
1672 *==========================================================================*/
1673void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1674    region[0] = rect.left;
1675    region[1] = rect.top;
1676    region[2] = rect.left + rect.width;
1677    region[3] = rect.top + rect.height;
1678    if (weight > -1) {
1679        region[4] = weight;
1680    }
1681}
1682
1683/*===========================================================================
1684 * FUNCTION   : convertFromRegions
1685 *
1686 * DESCRIPTION: helper method to convert from array to cam_rect_t
1687 *
1688 * PARAMETERS :
1689 *   @rect   : cam_rect_t struct to convert
1690 *   @region : int32_t destination array
1691 *   @weight : if we are converting from cam_area_t, weight is valid
1692 *             else weight = -1
1693 *
1694 *==========================================================================*/
1695void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1696                                                   const camera_metadata_t *settings,
1697                                                   uint32_t tag){
1698    CameraMetadata frame_settings;
1699    frame_settings = settings;
1700    int32_t x_min = frame_settings.find(tag).data.i32[0];
1701    int32_t y_min = frame_settings.find(tag).data.i32[1];
1702    int32_t x_max = frame_settings.find(tag).data.i32[2];
1703    int32_t y_max = frame_settings.find(tag).data.i32[3];
1704    roi->weight = frame_settings.find(tag).data.i32[4];
1705    roi->rect.left = x_min;
1706    roi->rect.top = y_min;
1707    roi->rect.width = x_max - x_min;
1708    roi->rect.height = y_max - y_min;
1709}
1710
1711/*===========================================================================
1712 * FUNCTION   : resetIfNeededROI
1713 *
1714 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1715 *              crop region
1716 *
1717 * PARAMETERS :
1718 *   @roi       : cam_area_t struct to resize
1719 *   @scalerCropRegion : cam_crop_region_t region to compare against
1720 *
1721 *
1722 *==========================================================================*/
1723bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1724                                                 const cam_crop_region_t* scalerCropRegion)
1725{
1726    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1727    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1728    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1729    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1730    if ((roi_x_max < scalerCropRegion->left) ||
1731        (roi_y_max < scalerCropRegion->top)  ||
1732        (roi->rect.left > crop_x_max) ||
1733        (roi->rect.top > crop_y_max)){
1734        return false;
1735    }
1736    if (roi->rect.left < scalerCropRegion->left) {
1737        roi->rect.left = scalerCropRegion->left;
1738    }
1739    if (roi->rect.top < scalerCropRegion->top) {
1740        roi->rect.top = scalerCropRegion->top;
1741    }
1742    if (roi_x_max > crop_x_max) {
1743        roi_x_max = crop_x_max;
1744    }
1745    if (roi_y_max > crop_y_max) {
1746        roi_y_max = crop_y_max;
1747    }
1748    roi->rect.width = roi_x_max - roi->rect.left;
1749    roi->rect.height = roi_y_max - roi->rect.top;
1750    return true;
1751}
1752
1753/*===========================================================================
1754 * FUNCTION   : convertLandmarks
1755 *
1756 * DESCRIPTION: helper method to extract the landmarks from face detection info
1757 *
1758 * PARAMETERS :
1759 *   @face   : cam_rect_t struct to convert
1760 *   @landmarks : int32_t destination array
1761 *
1762 *
1763 *==========================================================================*/
1764void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1765{
1766    landmarks[0] = face.left_eye_center.x;
1767    landmarks[1] = face.left_eye_center.y;
1768    landmarks[2] = face.right_eye_center.y;
1769    landmarks[3] = face.right_eye_center.y;
1770    landmarks[4] = face.mouth_center.x;
1771    landmarks[5] = face.mouth_center.y;
1772}
1773
1774#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1775/*===========================================================================
1776 * FUNCTION   : initCapabilities
1777 *
1778 * DESCRIPTION: initialize camera capabilities in static data struct
1779 *
1780 * PARAMETERS :
1781 *   @cameraId  : camera Id
1782 *
1783 * RETURN     : int32_t type of status
1784 *              NO_ERROR  -- success
1785 *              none-zero failure code
1786 *==========================================================================*/
1787int QCamera3HardwareInterface::initCapabilities(int cameraId)
1788{
1789    int rc = 0;
1790    mm_camera_vtbl_t *cameraHandle = NULL;
1791    QCamera3HeapMemory *capabilityHeap = NULL;
1792
1793    cameraHandle = camera_open(cameraId);
1794    if (!cameraHandle) {
1795        ALOGE("%s: camera_open failed", __func__);
1796        rc = -1;
1797        goto open_failed;
1798    }
1799
1800    capabilityHeap = new QCamera3HeapMemory();
1801    if (capabilityHeap == NULL) {
1802        ALOGE("%s: creation of capabilityHeap failed", __func__);
1803        goto heap_creation_failed;
1804    }
1805    /* Allocate memory for capability buffer */
1806    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1807    if(rc != OK) {
1808        ALOGE("%s: No memory for cappability", __func__);
1809        goto allocate_failed;
1810    }
1811
1812    /* Map memory for capability buffer */
1813    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1814    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1815                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1816                                capabilityHeap->getFd(0),
1817                                sizeof(cam_capability_t));
1818    if(rc < 0) {
1819        ALOGE("%s: failed to map capability buffer", __func__);
1820        goto map_failed;
1821    }
1822
1823    /* Query Capability */
1824    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1825    if(rc < 0) {
1826        ALOGE("%s: failed to query capability",__func__);
1827        goto query_failed;
1828    }
1829    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1830    if (!gCamCapability[cameraId]) {
1831        ALOGE("%s: out of memory", __func__);
1832        goto query_failed;
1833    }
1834    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1835                                        sizeof(cam_capability_t));
1836    rc = 0;
1837
1838query_failed:
1839    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1840                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1841map_failed:
1842    capabilityHeap->deallocate();
1843allocate_failed:
1844    delete capabilityHeap;
1845heap_creation_failed:
1846    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1847    cameraHandle = NULL;
1848open_failed:
1849    return rc;
1850}
1851
1852/*===========================================================================
1853 * FUNCTION   : initParameters
1854 *
1855 * DESCRIPTION: initialize camera parameters
1856 *
1857 * PARAMETERS :
1858 *
1859 * RETURN     : int32_t type of status
1860 *              NO_ERROR  -- success
1861 *              none-zero failure code
1862 *==========================================================================*/
1863int QCamera3HardwareInterface::initParameters()
1864{
1865    int rc = 0;
1866
1867    //Allocate Set Param Buffer
1868    mParamHeap = new QCamera3HeapMemory();
1869    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1870    if(rc != OK) {
1871        rc = NO_MEMORY;
1872        ALOGE("Failed to allocate SETPARM Heap memory");
1873        delete mParamHeap;
1874        mParamHeap = NULL;
1875        return rc;
1876    }
1877
1878    //Map memory for parameters buffer
1879    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1880            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1881            mParamHeap->getFd(0),
1882            sizeof(parm_buffer_t));
1883    if(rc < 0) {
1884        ALOGE("%s:failed to map SETPARM buffer",__func__);
1885        rc = FAILED_TRANSACTION;
1886        mParamHeap->deallocate();
1887        delete mParamHeap;
1888        mParamHeap = NULL;
1889        return rc;
1890    }
1891
1892    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1893    return rc;
1894}
1895
1896/*===========================================================================
1897 * FUNCTION   : deinitParameters
1898 *
1899 * DESCRIPTION: de-initialize camera parameters
1900 *
1901 * PARAMETERS :
1902 *
1903 * RETURN     : NONE
1904 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Release the backend mapping first: the heap memory must stay valid
    // until the backend no longer references it.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // Free the backing store allocated in initParameters().
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap just freed; clear the dangling pointer.
    mParameters = NULL;
}
1916
1917/*===========================================================================
1918 * FUNCTION   : calcMaxJpegSize
1919 *
1920 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1921 *
1922 * PARAMETERS :
1923 *
1924 * RETURN     : max_jpeg_size
1925 *==========================================================================*/
1926int QCamera3HardwareInterface::calcMaxJpegSize()
1927{
1928    int32_t max_jpeg_size = 0;
1929    int temp_width, temp_height;
1930    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1931        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1932        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1933        if (temp_width * temp_height > max_jpeg_size ) {
1934            max_jpeg_size = temp_width * temp_height;
1935        }
1936    }
1937    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1938    return max_jpeg_size;
1939}
1940
1941/*===========================================================================
1942 * FUNCTION   : initStaticMetadata
1943 *
1944 * DESCRIPTION: initialize the static metadata
1945 *
1946 * PARAMETERS :
1947 *   @cameraId  : camera Id
1948 *
1949 * RETURN     : int32_t type of status
1950 *              0  -- success
1951 *              non-zero failure code
1952 *==========================================================================*/
1953int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1954{
1955    int rc = 0;
1956    CameraMetadata staticInfo;
1957
1958    /* android.info: hardware level */
1959    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
1960    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1961        &supportedHardwareLevel, 1);
1962
1963    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1964    /*HAL 3 only*/
1965    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1966                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1967
1968    /*hard coded for now but this should come from sensor*/
1969    float min_focus_distance;
1970    if(facingBack){
1971        min_focus_distance = 10;
1972    } else {
1973        min_focus_distance = 0;
1974    }
1975    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1976                    &min_focus_distance, 1);
1977
1978    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1979                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1980
1981    /*should be using focal lengths but sensor doesn't provide that info now*/
1982    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1983                      &gCamCapability[cameraId]->focal_length,
1984                      1);
1985
1986    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1987                      gCamCapability[cameraId]->apertures,
1988                      gCamCapability[cameraId]->apertures_count);
1989
1990    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1991                gCamCapability[cameraId]->filter_densities,
1992                gCamCapability[cameraId]->filter_densities_count);
1993
1994
1995    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1996                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1997                      gCamCapability[cameraId]->optical_stab_modes_count);
1998
1999    staticInfo.update(ANDROID_LENS_POSITION,
2000                      gCamCapability[cameraId]->lens_position,
2001                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2002
2003    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2004                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2005    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2006                      lens_shading_map_size,
2007                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2008
2009    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2010                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2011    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2012            geo_correction_map_size,
2013            sizeof(geo_correction_map_size)/sizeof(int32_t));
2014
2015    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2016                       gCamCapability[cameraId]->geo_correction_map,
2017                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2018
2019    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2020            gCamCapability[cameraId]->sensor_physical_size, 2);
2021
2022    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2023            gCamCapability[cameraId]->exposure_time_range, 2);
2024
2025    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2026            &gCamCapability[cameraId]->max_frame_duration, 1);
2027
2028
2029    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2030                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2031
2032    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2033                                               gCamCapability[cameraId]->pixel_array_size.height};
2034    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2035                      pixel_array_size, 2);
2036
2037    int32_t active_array_size[] = {0, 0,
2038                                                gCamCapability[cameraId]->active_array_size.width,
2039                                                gCamCapability[cameraId]->active_array_size.height};
2040    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2041                      active_array_size, 4);
2042
2043    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2044            &gCamCapability[cameraId]->white_level, 1);
2045
2046    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2047            gCamCapability[cameraId]->black_level_pattern, 4);
2048
2049    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2050                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2051
2052    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2053                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2054
2055    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2056                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2057
2058    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2059                      &gCamCapability[cameraId]->histogram_size, 1);
2060
2061    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2062            &gCamCapability[cameraId]->max_histogram_count, 1);
2063
2064    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2065                                                gCamCapability[cameraId]->sharpness_map_size.height};
2066
2067    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2068            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2069
2070    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2071            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2072
2073
2074    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2075                      &gCamCapability[cameraId]->raw_min_duration,
2076                       1);
2077
2078    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2079                                                HAL_PIXEL_FORMAT_BLOB};
2080    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2081    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2082                      scalar_formats,
2083                      scalar_formats_count);
2084
2085    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2086    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2087              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2088              available_processed_sizes);
2089    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2090                available_processed_sizes,
2091                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2092
2093    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2094                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2095                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2096
2097    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2098    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2099                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2100                 available_fps_ranges);
2101    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2102            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2103
2104    camera_metadata_rational exposureCompensationStep = {
2105            gCamCapability[cameraId]->exp_compensation_step.numerator,
2106            gCamCapability[cameraId]->exp_compensation_step.denominator};
2107    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2108                      &exposureCompensationStep, 1);
2109
2110    /*TO DO*/
2111    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2112    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2113                      availableVstabModes, sizeof(availableVstabModes));
2114
2115    /*HAL 1 and HAL 3 common*/
2116    float maxZoom = 4;
2117    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2118            &maxZoom, 1);
2119
2120    int32_t max3aRegions = 1;
2121    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2122            &max3aRegions, 1);
2123
2124    uint8_t availableFaceDetectModes[] = {
2125            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2126            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2127    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2128                      availableFaceDetectModes,
2129                      sizeof(availableFaceDetectModes));
2130
2131    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2132                                       gCamCapability[cameraId]->raw_dim.height};
2133    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2134                      raw_size,
2135                      sizeof(raw_size)/sizeof(uint32_t));
2136
2137    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2138                                                        gCamCapability[cameraId]->exposure_compensation_max};
2139    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2140            exposureCompensationRange,
2141            sizeof(exposureCompensationRange)/sizeof(int32_t));
2142
2143    uint8_t lensFacing = (facingBack) ?
2144            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2145    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2146
2147    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2148                available_processed_sizes,
2149                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2150
2151    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2152                      available_thumbnail_sizes,
2153                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2154
2155    int32_t max_jpeg_size = 0;
2156    int temp_width, temp_height;
2157    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2158        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2159        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2160        if (temp_width * temp_height > max_jpeg_size ) {
2161            max_jpeg_size = temp_width * temp_height;
2162        }
2163    }
2164    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2165    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2166                      &max_jpeg_size, 1);
2167
2168    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2169    int32_t size = 0;
2170    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2171        int val = lookupFwkName(EFFECT_MODES_MAP,
2172                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2173                                   gCamCapability[cameraId]->supported_effects[i]);
2174        if (val != NAME_NOT_FOUND) {
2175            avail_effects[size] = (uint8_t)val;
2176            size++;
2177        }
2178    }
2179    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2180                      avail_effects,
2181                      size);
2182
2183    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2184    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2185    int32_t supported_scene_modes_cnt = 0;
2186    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2187        int val = lookupFwkName(SCENE_MODES_MAP,
2188                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2189                                gCamCapability[cameraId]->supported_scene_modes[i]);
2190        if (val != NAME_NOT_FOUND) {
2191            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2192            supported_indexes[supported_scene_modes_cnt] = i;
2193            supported_scene_modes_cnt++;
2194        }
2195    }
2196
2197    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2198                      avail_scene_modes,
2199                      supported_scene_modes_cnt);
2200
2201    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2202    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2203                      supported_scene_modes_cnt,
2204                      scene_mode_overrides,
2205                      supported_indexes,
2206                      cameraId);
2207    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2208                      scene_mode_overrides,
2209                      supported_scene_modes_cnt*3);
2210
2211    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2212    size = 0;
2213    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2214        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2215                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2216                                 gCamCapability[cameraId]->supported_antibandings[i]);
2217        if (val != NAME_NOT_FOUND) {
2218            avail_antibanding_modes[size] = (uint8_t)val;
2219            size++;
2220        }
2221
2222    }
2223    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2224                      avail_antibanding_modes,
2225                      size);
2226
2227    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2228    size = 0;
2229    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2230        int val = lookupFwkName(FOCUS_MODES_MAP,
2231                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2232                                gCamCapability[cameraId]->supported_focus_modes[i]);
2233        if (val != NAME_NOT_FOUND) {
2234            avail_af_modes[size] = (uint8_t)val;
2235            size++;
2236        }
2237    }
2238    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2239                      avail_af_modes,
2240                      size);
2241
2242    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2243    size = 0;
2244    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2245        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2246                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2247                                    gCamCapability[cameraId]->supported_white_balances[i]);
2248        if (val != NAME_NOT_FOUND) {
2249            avail_awb_modes[size] = (uint8_t)val;
2250            size++;
2251        }
2252    }
2253    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2254                      avail_awb_modes,
2255                      size);
2256
2257    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2258    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2259      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2260
2261    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2262            available_flash_levels,
2263            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2264
2265
2266    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2267    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2268            &flashAvailable, 1);
2269
2270    uint8_t avail_ae_modes[5];
2271    size = 0;
2272    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2273        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2274        size++;
2275    }
2276    if (flashAvailable) {
2277        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2278        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2279        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2280    }
2281    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2282                      avail_ae_modes,
2283                      size);
2284
2285    int32_t sensitivity_range[2];
2286    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2287    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2288    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2289                      sensitivity_range,
2290                      sizeof(sensitivity_range) / sizeof(int32_t));
2291
2292    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2293                      &gCamCapability[cameraId]->max_analog_sensitivity,
2294                      1);
2295
2296    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2297                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2298                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2299
2300    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2301    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2302                      &sensor_orientation,
2303                      1);
2304
2305    int32_t max_output_streams[3] = {1, 3, 1};
2306    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2307                      max_output_streams,
2308                      3);
2309
2310    gStaticMetadata[cameraId] = staticInfo.release();
2311    return rc;
2312}
2313
2314/*===========================================================================
2315 * FUNCTION   : makeTable
2316 *
2317 * DESCRIPTION: make a table of sizes
2318 *
2319 * PARAMETERS :
2320 *
2321 *
2322 *==========================================================================*/
2323void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2324                                          int32_t* sizeTable)
2325{
2326    int j = 0;
2327    for (int i = 0; i < size; i++) {
2328        sizeTable[j] = dimTable[i].width;
2329        sizeTable[j+1] = dimTable[i].height;
2330        j+=2;
2331    }
2332}
2333
2334/*===========================================================================
2335 * FUNCTION   : makeFPSTable
2336 *
2337 * DESCRIPTION: make a table of fps ranges
2338 *
2339 * PARAMETERS :
2340 *
2341 *==========================================================================*/
2342void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2343                                          int32_t* fpsRangesTable)
2344{
2345    int j = 0;
2346    for (int i = 0; i < size; i++) {
2347        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2348        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2349        j+=2;
2350    }
2351}
2352
2353/*===========================================================================
2354 * FUNCTION   : makeOverridesList
2355 *
2356 * DESCRIPTION: make a list of scene mode overrides
2357 *
2358 * PARAMETERS :
2359 *
2360 *
2361 *==========================================================================*/
2362void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2363                                                  uint8_t size, uint8_t* overridesList,
2364                                                  uint8_t* supported_indexes,
2365                                                  int camera_id)
2366{
2367    /*daemon will give a list of overrides for all scene modes.
2368      However we should send the fwk only the overrides for the scene modes
2369      supported by the framework*/
2370    int j = 0, index = 0, supt = 0;
2371    uint8_t focus_override;
2372    for (int i = 0; i < size; i++) {
2373        supt = 0;
2374        index = supported_indexes[i];
2375        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2376        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2377                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2378                                                    overridesTable[index].awb_mode);
2379        focus_override = (uint8_t)overridesTable[index].af_mode;
2380        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2381           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2382              supt = 1;
2383              break;
2384           }
2385        }
2386        if (supt) {
2387           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2388                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2389                                              focus_override);
2390        } else {
2391           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2392        }
2393        j+=3;
2394    }
2395}
2396
2397/*===========================================================================
2398 * FUNCTION   : getPreviewHalPixelFormat
2399 *
2400 * DESCRIPTION: convert the format to type recognized by framework
2401 *
2402 * PARAMETERS : format : the format from backend
2403 *
2404 ** RETURN    : format recognized by framework
2405 *
2406 *==========================================================================*/
2407int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2408{
2409    int32_t halPixelFormat;
2410
2411    switch (format) {
2412    case CAM_FORMAT_YUV_420_NV12:
2413        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2414        break;
2415    case CAM_FORMAT_YUV_420_NV21:
2416        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2417        break;
2418    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2419        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2420        break;
2421    case CAM_FORMAT_YUV_420_YV12:
2422        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2423        break;
2424    case CAM_FORMAT_YUV_422_NV16:
2425    case CAM_FORMAT_YUV_422_NV61:
2426    default:
2427        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2428        break;
2429    }
2430    return halPixelFormat;
2431}
2432
2433/*===========================================================================
2434 * FUNCTION   : getSensorSensitivity
2435 *
2436 * DESCRIPTION: convert iso_mode to an integer value
2437 *
2438 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2439 *
2440 ** RETURN    : sensitivity supported by sensor
2441 *
2442 *==========================================================================*/
2443int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2444{
2445    int32_t sensitivity;
2446
2447    switch (iso_mode) {
2448    case CAM_ISO_MODE_100:
2449        sensitivity = 100;
2450        break;
2451    case CAM_ISO_MODE_200:
2452        sensitivity = 200;
2453        break;
2454    case CAM_ISO_MODE_400:
2455        sensitivity = 400;
2456        break;
2457    case CAM_ISO_MODE_800:
2458        sensitivity = 800;
2459        break;
2460    case CAM_ISO_MODE_1600:
2461        sensitivity = 1600;
2462        break;
2463    default:
2464        sensitivity = -1;
2465        break;
2466    }
2467    return sensitivity;
2468}
2469
2470
2471/*===========================================================================
2472 * FUNCTION   : AddSetParmEntryToBatch
2473 *
2474 * DESCRIPTION: add set parameter entry into batch
2475 *
2476 * PARAMETERS :
2477 *   @p_table     : ptr to parameter buffer
2478 *   @paramType   : parameter type
2479 *   @paramLength : length of parameter value
2480 *   @paramValue  : ptr to parameter value
2481 *
2482 * RETURN     : int32_t type of status
2483 *              NO_ERROR  -- success
2484 *              none-zero failure code
2485 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    // The parm buffer keeps a singly linked list of flagged entries,
    // ordered by parameter id. First splice `position` into the list,
    // then copy the value into the entry's slot.
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
        // Entry is already the head of the list; links are correct as-is.
    } else if (position < current){
        // New smallest id: link in front of the old head and become the head.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Each entry slot holds at most sizeof(parm_type_t) bytes; reject
    // anything larger rather than overrun the buffer.
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2527
2528/*===========================================================================
2529 * FUNCTION   : lookupFwkName
2530 *
2531 * DESCRIPTION: In case the enum is not same in fwk and backend
2532 *              make sure the parameter is correctly propogated
2533 *
2534 * PARAMETERS  :
2535 *   @arr      : map between the two enums
2536 *   @len      : len of the map
2537 *   @hal_name : name of the hal_parm to map
2538 *
2539 * RETURN     : int type of status
2540 *              fwk_name  -- success
2541 *              none-zero failure code
2542 *==========================================================================*/
2543int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2544                                             int len, int hal_name)
2545{
2546
2547    for (int i = 0; i < len; i++) {
2548        if (arr[i].hal_name == hal_name)
2549            return arr[i].fwk_name;
2550    }
2551
2552    /* Not able to find matching framework type is not necessarily
2553     * an error case. This happens when mm-camera supports more attributes
2554     * than the frameworks do */
2555    ALOGD("%s: Cannot find matching framework type", __func__);
2556    return NAME_NOT_FOUND;
2557}
2558
2559/*===========================================================================
2560 * FUNCTION   : lookupHalName
2561 *
2562 * DESCRIPTION: In case the enum is not same in fwk and backend
2563 *              make sure the parameter is correctly propogated
2564 *
2565 * PARAMETERS  :
2566 *   @arr      : map between the two enums
2567 *   @len      : len of the map
2568 *   @fwk_name : name of the hal_parm to map
2569 *
2570 * RETURN     : int32_t type of status
2571 *              hal_name  -- success
2572 *              none-zero failure code
2573 *==========================================================================*/
2574int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2575                                             int len, int fwk_name)
2576{
2577    for (int i = 0; i < len; i++) {
2578       if (arr[i].fwk_name == fwk_name)
2579           return arr[i].hal_name;
2580    }
2581    ALOGE("%s: Cannot find matching hal type", __func__);
2582    return NAME_NOT_FOUND;
2583}
2584
2585/*===========================================================================
2586 * FUNCTION   : getCapabilities
2587 *
2588 * DESCRIPTION: query camera capabilities
2589 *
2590 * PARAMETERS :
2591 *   @cameraId  : camera Id
2592 *   @info      : camera info struct to be filled in with camera capabilities
2593 *
2594 * RETURN     : int32_t type of status
2595 *              NO_ERROR  -- success
2596 *              none-zero failure code
2597 *==========================================================================*/
2598int QCamera3HardwareInterface::getCamInfo(int cameraId,
2599                                    struct camera_info *info)
2600{
2601    int rc = 0;
2602
2603    if (NULL == gCamCapability[cameraId]) {
2604        rc = initCapabilities(cameraId);
2605        if (rc < 0) {
2606            //pthread_mutex_unlock(&g_camlock);
2607            return rc;
2608        }
2609    }
2610
2611    if (NULL == gStaticMetadata[cameraId]) {
2612        rc = initStaticMetadata(cameraId);
2613        if (rc < 0) {
2614            return rc;
2615        }
2616    }
2617
2618    switch(gCamCapability[cameraId]->position) {
2619    case CAM_POSITION_BACK:
2620        info->facing = CAMERA_FACING_BACK;
2621        break;
2622
2623    case CAM_POSITION_FRONT:
2624        info->facing = CAMERA_FACING_FRONT;
2625        break;
2626
2627    default:
2628        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2629        rc = -1;
2630        break;
2631    }
2632
2633
2634    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2635    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2636    info->static_camera_characteristics = gStaticMetadata[cameraId];
2637
2638    return rc;
2639}
2640
2641/*===========================================================================
2642 * FUNCTION   : translateMetadata
2643 *
2644 * DESCRIPTION: translate the metadata into camera_metadata_t
2645 *
2646 * PARAMETERS : type of the request
2647 *
2648 *
2649 * RETURN     : success: camera_metadata_t*
2650 *              failure: NULL
2651 *
2652 *==========================================================================*/
2653camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2654{
2655    pthread_mutex_lock(&mMutex);
2656
2657    if (mDefaultMetadata[type] != NULL) {
2658        pthread_mutex_unlock(&mMutex);
2659        return mDefaultMetadata[type];
2660    }
2661    //first time we are handling this request
2662    //fill up the metadata structure using the wrapper class
2663    CameraMetadata settings;
2664    //translate from cam_capability_t to camera_metadata_tag_t
2665    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2666    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2667
2668    /*control*/
2669
2670    uint8_t controlIntent = 0;
2671    switch (type) {
2672      case CAMERA3_TEMPLATE_PREVIEW:
2673        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2674        break;
2675      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2676        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2677        break;
2678      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2679        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2680        break;
2681      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2682        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2683        break;
2684      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2685        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2686        break;
2687      default:
2688        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2689        break;
2690    }
2691    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2692
2693    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2694            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2695
2696    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2697    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2698
2699    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2700    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2701
2702    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2703    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2704
2705    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2706    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2707
2708    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2709    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2710
2711    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2712    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2713
2714    static uint8_t focusMode;
2715    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2716        ALOGE("%s: Setting focus mode to auto", __func__);
2717        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2718    } else {
2719        ALOGE("%s: Setting focus mode to off", __func__);
2720        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2721    }
2722    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2723
2724    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2725    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2726
2727    /*flash*/
2728    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2729    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2730
2731    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2732    settings.update(ANDROID_FLASH_FIRING_POWER,
2733            &flashFiringLevel, 1);
2734
2735    /* lens */
2736    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2737    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2738
2739    if (gCamCapability[mCameraId]->filter_densities_count) {
2740        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2741        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2742                        gCamCapability[mCameraId]->filter_densities_count);
2743    }
2744
2745    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2746    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2747
2748    /* frame duration */
2749    int64_t default_frame_duration = NSEC_PER_33MSEC;
2750    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2751
2752    /* sensitivity */
2753    int32_t default_sensitivity = 100;
2754    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2755
2756    mDefaultMetadata[type] = settings.release();
2757
2758    pthread_mutex_unlock(&mMutex);
2759    return mDefaultMetadata[type];
2760}
2761
2762/*===========================================================================
2763 * FUNCTION   : setFrameParameters
2764 *
2765 * DESCRIPTION: set parameters per frame as requested in the metadata from
2766 *              framework
2767 *
2768 * PARAMETERS :
2769 *   @request   : request that needs to be serviced
2770 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2771 *
2772 * RETURN     : success: NO_ERROR
2773 *              failure:
2774 *==========================================================================*/
2775int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2776                    uint32_t streamTypeMask)
2777{
2778    /*translate from camera_metadata_t type to parm_type_t*/
2779    int rc = 0;
2780    if (request->settings == NULL && mFirstRequest) {
2781        /*settings cannot be null for the first request*/
2782        return BAD_VALUE;
2783    }
2784
2785    int32_t hal_version = CAM_HAL_V3;
2786
2787    memset(mParameters, 0, sizeof(parm_buffer_t));
2788    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2789    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2790                sizeof(hal_version), &hal_version);
2791    if (rc < 0) {
2792        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2793        return BAD_VALUE;
2794    }
2795
2796    /*we need to update the frame number in the parameters*/
2797    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2798                                sizeof(request->frame_number), &(request->frame_number));
2799    if (rc < 0) {
2800        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2801        return BAD_VALUE;
2802    }
2803
2804    /* Update stream id mask where buffers are requested */
2805    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2806                                sizeof(streamTypeMask), &streamTypeMask);
2807    if (rc < 0) {
2808        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2809        return BAD_VALUE;
2810    }
2811
2812    if(request->settings != NULL){
2813        rc = translateMetadataToParameters(request);
2814    }
2815    /*set the parameters to backend*/
2816    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2817    return rc;
2818}
2819
2820/*===========================================================================
2821 * FUNCTION   : translateMetadataToParameters
2822 *
2823 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2824 *
2825 *
2826 * PARAMETERS :
2827 *   @request  : request sent from framework
2828 *
2829 *
2830 * RETURN     : success: NO_ERROR
2831 *              failure:
2832 *==========================================================================*/
2833int QCamera3HardwareInterface::translateMetadataToParameters
2834                                  (const camera3_capture_request_t *request)
2835{
2836    int rc = 0;
2837    CameraMetadata frame_settings;
2838    frame_settings = request->settings;
2839
2840    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2841        int32_t antibandingMode =
2842            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2843        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2844                sizeof(antibandingMode), &antibandingMode);
2845    }
2846
2847    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2848        int32_t expCompensation = frame_settings.find(
2849            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2850        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2851            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2852        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2853            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2854        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2855          sizeof(expCompensation), &expCompensation);
2856    }
2857
2858    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2859        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2860        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2861                sizeof(aeLock), &aeLock);
2862    }
2863    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2864        cam_fps_range_t fps_range;
2865        fps_range.min_fps =
2866            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2867        fps_range.max_fps =
2868            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2869        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2870                sizeof(fps_range), &fps_range);
2871    }
2872
2873    float focalDistance = -1.0;
2874    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2875        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2876        rc = AddSetParmEntryToBatch(mParameters,
2877                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2878                sizeof(focalDistance), &focalDistance);
2879    }
2880
2881    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2882        uint8_t fwk_focusMode =
2883            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2884        uint8_t focusMode;
2885        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2886            focusMode = CAM_FOCUS_MODE_INFINITY;
2887        } else{
2888         focusMode = lookupHalName(FOCUS_MODES_MAP,
2889                                   sizeof(FOCUS_MODES_MAP),
2890                                   fwk_focusMode);
2891        }
2892        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2893                sizeof(focusMode), &focusMode);
2894    }
2895
2896    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2897        uint8_t awbLock =
2898            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2899        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2900                sizeof(awbLock), &awbLock);
2901    }
2902
2903    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2904        uint8_t fwk_whiteLevel =
2905            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2906        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2907                sizeof(WHITE_BALANCE_MODES_MAP),
2908                fwk_whiteLevel);
2909        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2910                sizeof(whiteLevel), &whiteLevel);
2911    }
2912
2913    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2914        uint8_t fwk_effectMode =
2915            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2916        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2917                sizeof(EFFECT_MODES_MAP),
2918                fwk_effectMode);
2919        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2920                sizeof(effectMode), &effectMode);
2921    }
2922
2923    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2924        uint8_t fwk_aeMode =
2925            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2926        uint8_t aeMode;
2927        int32_t redeye;
2928
2929        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2930            aeMode = CAM_AE_MODE_OFF;
2931        } else {
2932            aeMode = CAM_AE_MODE_ON;
2933        }
2934        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2935            redeye = 1;
2936        } else {
2937            redeye = 0;
2938        }
2939
2940        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2941                                          sizeof(AE_FLASH_MODE_MAP),
2942                                          fwk_aeMode);
2943        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2944                sizeof(aeMode), &aeMode);
2945        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2946                sizeof(flashMode), &flashMode);
2947        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2948                sizeof(redeye), &redeye);
2949    }
2950
2951    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2952        uint8_t colorCorrectMode =
2953            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2954        rc =
2955            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2956                    sizeof(colorCorrectMode), &colorCorrectMode);
2957    }
2958
2959    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
2960        cam_color_correct_gains_t colorCorrectGains;
2961        for (int i = 0; i < 4; i++) {
2962            colorCorrectGains.gains[i] =
2963                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
2964        }
2965        rc =
2966            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
2967                    sizeof(colorCorrectGains), &colorCorrectGains);
2968    }
2969
2970    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
2971        cam_color_correct_matrix_t colorCorrectTransform;
2972        cam_rational_type_t transform_elem;
2973        int num = 0;
2974        for (int i = 0; i < 3; i++) {
2975           for (int j = 0; j < 3; j++) {
2976              transform_elem.numerator =
2977                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
2978              transform_elem.denominator =
2979                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
2980              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
2981              num++;
2982           }
2983        }
2984        rc =
2985            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
2986                    sizeof(colorCorrectTransform), &colorCorrectTransform);
2987    }
2988
2989    cam_trigger_t aecTrigger;
2990    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2991    aecTrigger.trigger_id = -1;
2992    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2993        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2994        aecTrigger.trigger =
2995            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2996        aecTrigger.trigger_id =
2997            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2998    }
2999    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3000                                sizeof(aecTrigger), &aecTrigger);
3001
3002    /*af_trigger must come with a trigger id*/
3003    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3004        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3005        cam_trigger_t af_trigger;
3006        af_trigger.trigger =
3007            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3008        af_trigger.trigger_id =
3009            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3010        rc = AddSetParmEntryToBatch(mParameters,
3011                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3012    }
3013
3014    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3015        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3016        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3017                sizeof(metaMode), &metaMode);
3018        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3019           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3020           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3021                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3022                                             fwk_sceneMode);
3023           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3024                sizeof(sceneMode), &sceneMode);
3025        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3026           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3027           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3028                sizeof(sceneMode), &sceneMode);
3029        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3030           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3031           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3032                sizeof(sceneMode), &sceneMode);
3033        }
3034    }
3035
3036    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3037        int32_t demosaic =
3038            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3039        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3040                sizeof(demosaic), &demosaic);
3041    }
3042
3043    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3044        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3045        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3046                sizeof(edgeMode), &edgeMode);
3047    }
3048
3049    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3050        int32_t edgeStrength =
3051            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3052        rc = AddSetParmEntryToBatch(mParameters,
3053                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
3054    }
3055
3056    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3057        int32_t respectFlashMode = 1;
3058        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3059            uint8_t fwk_aeMode =
3060                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3061            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3062                respectFlashMode = 0;
3063                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3064                    __func__);
3065            }
3066        }
3067        if (respectFlashMode) {
3068            uint8_t flashMode =
3069                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3070            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3071                                          sizeof(FLASH_MODES_MAP),
3072                                          flashMode);
3073            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3074            // To check: CAM_INTF_META_FLASH_MODE usage
3075            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3076                          sizeof(flashMode), &flashMode);
3077        }
3078    }
3079
3080    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3081        uint8_t flashPower =
3082            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3083        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3084                sizeof(flashPower), &flashPower);
3085    }
3086
3087    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3088        int64_t flashFiringTime =
3089            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3090        rc = AddSetParmEntryToBatch(mParameters,
3091                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3092    }
3093
3094    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3095        uint8_t geometricMode =
3096            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3097        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3098                sizeof(geometricMode), &geometricMode);
3099    }
3100
3101    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3102        uint8_t geometricStrength =
3103            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3104        rc = AddSetParmEntryToBatch(mParameters,
3105                CAM_INTF_META_GEOMETRIC_STRENGTH,
3106                sizeof(geometricStrength), &geometricStrength);
3107    }
3108
3109    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3110        uint8_t hotPixelMode =
3111            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3112        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3113                sizeof(hotPixelMode), &hotPixelMode);
3114    }
3115
3116    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3117        float lensAperture =
3118            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3119        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3120                sizeof(lensAperture), &lensAperture);
3121    }
3122
3123    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3124        float filterDensity =
3125            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3126        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3127                sizeof(filterDensity), &filterDensity);
3128    }
3129
3130    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3131        float focalLength =
3132            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3133        rc = AddSetParmEntryToBatch(mParameters,
3134                CAM_INTF_META_LENS_FOCAL_LENGTH,
3135                sizeof(focalLength), &focalLength);
3136    }
3137
3138    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3139        uint8_t optStabMode =
3140            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3141        rc = AddSetParmEntryToBatch(mParameters,
3142                CAM_INTF_META_LENS_OPT_STAB_MODE,
3143                sizeof(optStabMode), &optStabMode);
3144    }
3145
3146    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3147        uint8_t noiseRedMode =
3148            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3149        rc = AddSetParmEntryToBatch(mParameters,
3150                CAM_INTF_META_NOISE_REDUCTION_MODE,
3151                sizeof(noiseRedMode), &noiseRedMode);
3152    }
3153
3154    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3155        uint8_t noiseRedStrength =
3156            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3157        rc = AddSetParmEntryToBatch(mParameters,
3158                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3159                sizeof(noiseRedStrength), &noiseRedStrength);
3160    }
3161
3162    cam_crop_region_t scalerCropRegion;
3163    bool scalerCropSet = false;
3164    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3165        scalerCropRegion.left =
3166            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3167        scalerCropRegion.top =
3168            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3169        scalerCropRegion.width =
3170            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3171        scalerCropRegion.height =
3172            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3173        rc = AddSetParmEntryToBatch(mParameters,
3174                CAM_INTF_META_SCALER_CROP_REGION,
3175                sizeof(scalerCropRegion), &scalerCropRegion);
3176        scalerCropSet = true;
3177    }
3178
3179    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3180        int64_t sensorExpTime =
3181            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3182        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3183        rc = AddSetParmEntryToBatch(mParameters,
3184                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3185                sizeof(sensorExpTime), &sensorExpTime);
3186    }
3187
3188    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3189        int64_t sensorFrameDuration =
3190            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3191        int64_t minFrameDuration = getMinFrameDuration(request);
3192        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3193        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3194            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3195        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3196        rc = AddSetParmEntryToBatch(mParameters,
3197                CAM_INTF_META_SENSOR_FRAME_DURATION,
3198                sizeof(sensorFrameDuration), &sensorFrameDuration);
3199    }
3200
3201    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3202        int32_t sensorSensitivity =
3203            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3204        if (sensorSensitivity <
3205                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3206            sensorSensitivity =
3207                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3208        if (sensorSensitivity >
3209                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3210            sensorSensitivity =
3211                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3212        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3213        rc = AddSetParmEntryToBatch(mParameters,
3214                CAM_INTF_META_SENSOR_SENSITIVITY,
3215                sizeof(sensorSensitivity), &sensorSensitivity);
3216    }
3217
3218    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3219        int32_t shadingMode =
3220            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3221        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3222                sizeof(shadingMode), &shadingMode);
3223    }
3224
3225    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3226        uint8_t shadingStrength =
3227            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3228        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3229                sizeof(shadingStrength), &shadingStrength);
3230    }
3231
3232    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3233        uint8_t fwk_facedetectMode =
3234            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3235        uint8_t facedetectMode =
3236            lookupHalName(FACEDETECT_MODES_MAP,
3237                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3238        rc = AddSetParmEntryToBatch(mParameters,
3239                CAM_INTF_META_STATS_FACEDETECT_MODE,
3240                sizeof(facedetectMode), &facedetectMode);
3241    }
3242
3243    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3244        uint8_t histogramMode =
3245            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3246        rc = AddSetParmEntryToBatch(mParameters,
3247                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3248                sizeof(histogramMode), &histogramMode);
3249    }
3250
3251    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3252        uint8_t sharpnessMapMode =
3253            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3254        rc = AddSetParmEntryToBatch(mParameters,
3255                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3256                sizeof(sharpnessMapMode), &sharpnessMapMode);
3257    }
3258
3259    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3260        uint8_t tonemapMode =
3261            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3262        rc = AddSetParmEntryToBatch(mParameters,
3263                CAM_INTF_META_TONEMAP_MODE,
3264                sizeof(tonemapMode), &tonemapMode);
3265    }
3266    int point = 0;
3267    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) {
3268        cam_tonemap_curve_t tonemapCurveBlue;
3269        tonemapCurveBlue.tonemap_points_cnt =
3270           gCamCapability[mCameraId]->max_tone_map_curve_points;
3271        for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) {
3272            for (int j = 0; j < 2; j++) {
3273               tonemapCurveBlue.tonemap_points[i][j] =
3274                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3275               point++;
3276            }
3277        }
3278        rc = AddSetParmEntryToBatch(mParameters,
3279                CAM_INTF_META_TONEMAP_CURVE_BLUE,
3280                sizeof(tonemapCurveBlue), &tonemapCurveBlue);
3281    }
3282    point = 0;
3283    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) {
3284        cam_tonemap_curve_t tonemapCurveGreen;
3285        tonemapCurveGreen.tonemap_points_cnt =
3286           gCamCapability[mCameraId]->max_tone_map_curve_points;
3287        for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) {
3288            for (int j = 0; j < 2; j++) {
3289               tonemapCurveGreen.tonemap_points[i][j] =
3290                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3291               point++;
3292            }
3293        }
3294        rc = AddSetParmEntryToBatch(mParameters,
3295                CAM_INTF_META_TONEMAP_CURVE_GREEN,
3296                sizeof(tonemapCurveGreen), &tonemapCurveGreen);
3297    }
3298    point = 0;
3299    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3300        cam_tonemap_curve_t tonemapCurveRed;
3301        tonemapCurveRed.tonemap_points_cnt =
3302           gCamCapability[mCameraId]->max_tone_map_curve_points;
3303        for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) {
3304            for (int j = 0; j < 2; j++) {
3305               tonemapCurveRed.tonemap_points[i][j] =
3306                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3307               point++;
3308            }
3309        }
3310        rc = AddSetParmEntryToBatch(mParameters,
3311                CAM_INTF_META_TONEMAP_CURVE_RED,
3312                sizeof(tonemapCurveRed), &tonemapCurveRed);
3313    }
3314
3315    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3316        uint8_t captureIntent =
3317            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3318        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3319                sizeof(captureIntent), &captureIntent);
3320    }
3321
3322    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3323        uint8_t blackLevelLock =
3324            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3325        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3326                sizeof(blackLevelLock), &blackLevelLock);
3327    }
3328
3329    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3330        uint8_t lensShadingMapMode =
3331            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3332        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3333                sizeof(lensShadingMapMode), &lensShadingMapMode);
3334    }
3335
3336    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3337        cam_area_t roi;
3338        bool reset = true;
3339        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3340        if (scalerCropSet) {
3341            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3342        }
3343        if (reset) {
3344            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3345                    sizeof(roi), &roi);
3346        }
3347    }
3348
3349    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3350        cam_area_t roi;
3351        bool reset = true;
3352        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3353        if (scalerCropSet) {
3354            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3355        }
3356        if (reset) {
3357            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3358                    sizeof(roi), &roi);
3359        }
3360    }
3361
3362    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3363        cam_area_t roi;
3364        bool reset = true;
3365        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3366        if (scalerCropSet) {
3367            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3368        }
3369        if (reset) {
3370            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3371                    sizeof(roi), &roi);
3372        }
3373    }
3374    return rc;
3375}
3376
3377/*===========================================================================
3378 * FUNCTION   : getJpegSettings
3379 *
3380 * DESCRIPTION: save the jpeg settings in the HAL
3381 *
3382 *
3383 * PARAMETERS :
3384 *   @settings  : frame settings information from framework
3385 *
3386 *
3387 * RETURN     : success: NO_ERROR
3388 *              failure:
3389 *==========================================================================*/
3390int QCamera3HardwareInterface::getJpegSettings
3391                                  (const camera_metadata_t *settings)
3392{
3393    if (mJpegSettings) {
3394        if (mJpegSettings->gps_timestamp) {
3395            free(mJpegSettings->gps_timestamp);
3396            mJpegSettings->gps_timestamp = NULL;
3397        }
3398        if (mJpegSettings->gps_coordinates) {
3399            for (int i = 0; i < 3; i++) {
3400                free(mJpegSettings->gps_coordinates[i]);
3401                mJpegSettings->gps_coordinates[i] = NULL;
3402            }
3403        }
3404        free(mJpegSettings);
3405        mJpegSettings = NULL;
3406    }
3407    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3408    CameraMetadata jpeg_settings;
3409    jpeg_settings = settings;
3410
3411    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3412        mJpegSettings->jpeg_orientation =
3413            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3414    } else {
3415        mJpegSettings->jpeg_orientation = 0;
3416    }
3417    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3418        mJpegSettings->jpeg_quality =
3419            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3420    } else {
3421        mJpegSettings->jpeg_quality = 85;
3422    }
3423    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3424        mJpegSettings->thumbnail_size.width =
3425            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3426        mJpegSettings->thumbnail_size.height =
3427            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3428    } else {
3429        mJpegSettings->thumbnail_size.width = 0;
3430        mJpegSettings->thumbnail_size.height = 0;
3431    }
3432    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3433        for (int i = 0; i < 3; i++) {
3434            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3435            *(mJpegSettings->gps_coordinates[i]) =
3436                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3437        }
3438    } else{
3439       for (int i = 0; i < 3; i++) {
3440            mJpegSettings->gps_coordinates[i] = NULL;
3441        }
3442    }
3443
3444    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3445        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3446        *(mJpegSettings->gps_timestamp) =
3447            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3448    } else {
3449        mJpegSettings->gps_timestamp = NULL;
3450    }
3451
3452    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3453        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3454        for (int i = 0; i < len; i++) {
3455            mJpegSettings->gps_processing_method[i] =
3456                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3457        }
3458        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3459            mJpegSettings->gps_processing_method[len] = '\0';
3460        }
3461    } else {
3462        mJpegSettings->gps_processing_method[0] = '\0';
3463    }
3464
3465    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3466        mJpegSettings->sensor_sensitivity =
3467            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3468    } else {
3469        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3470    }
3471
3472    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3473
3474    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3475        mJpegSettings->lens_focal_length =
3476            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3477    }
3478    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3479        mJpegSettings->exposure_compensation =
3480            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3481    }
3482    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3483    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3484    mJpegSettings->is_jpeg_format = true;
3485    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3486    return 0;
3487}
3488
3489/*===========================================================================
3490 * FUNCTION   : captureResultCb
3491 *
3492 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3493 *
3494 * PARAMETERS :
3495 *   @frame  : frame information from mm-camera-interface
3496 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3497 *   @userdata: userdata
3498 *
3499 * RETURN     : NONE
3500 *==========================================================================*/
3501void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3502                camera3_stream_buffer_t *buffer,
3503                uint32_t frame_number, void *userdata)
3504{
3505    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3506    if (hw == NULL) {
3507        ALOGE("%s: Invalid hw %p", __func__, hw);
3508        return;
3509    }
3510
3511    hw->captureResultCb(metadata, buffer, frame_number);
3512    return;
3513}
3514
3515
3516/*===========================================================================
3517 * FUNCTION   : initialize
3518 *
3519 * DESCRIPTION: Pass framework callback pointers to HAL
3520 *
3521 * PARAMETERS :
3522 *
3523 *
3524 * RETURN     : Success : 0
3525 *              Failure: -ENODEV
3526 *==========================================================================*/
3527
3528int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3529                                  const camera3_callback_ops_t *callback_ops)
3530{
3531    ALOGV("%s: E", __func__);
3532    QCamera3HardwareInterface *hw =
3533        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3534    if (!hw) {
3535        ALOGE("%s: NULL camera device", __func__);
3536        return -ENODEV;
3537    }
3538
3539    int rc = hw->initialize(callback_ops);
3540    ALOGV("%s: X", __func__);
3541    return rc;
3542}
3543
3544/*===========================================================================
3545 * FUNCTION   : configure_streams
3546 *
3547 * DESCRIPTION: Static entry point; forwards the framework's stream
3547 *              configuration to the HAL instance's configureStreams().
3548 *
3549 * PARAMETERS :
3550 *
3551 *
3552 * RETURN     : Success: 0
3553 *              Failure: -EINVAL (if stream configuration is invalid)
3554 *                       -ENODEV (fatal error)
3555 *==========================================================================*/
3556
3557int QCamera3HardwareInterface::configure_streams(
3558        const struct camera3_device *device,
3559        camera3_stream_configuration_t *stream_list)
3560{
3561    ALOGV("%s: E", __func__);
3562    QCamera3HardwareInterface *hw =
3563        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3564    if (!hw) {
3565        ALOGE("%s: NULL camera device", __func__);
3566        return -ENODEV;
3567    }
3568    int rc = hw->configureStreams(stream_list);
3569    ALOGV("%s: X", __func__);
3570    return rc;
3571}
3572
3573/*===========================================================================
3574 * FUNCTION   : register_stream_buffers
3575 *
3576 * DESCRIPTION: Register stream buffers with the device
3577 *
3578 * PARAMETERS :
3579 *
3580 * RETURN     :
3581 *==========================================================================*/
3582int QCamera3HardwareInterface::register_stream_buffers(
3583        const struct camera3_device *device,
3584        const camera3_stream_buffer_set_t *buffer_set)
3585{
3586    ALOGV("%s: E", __func__);
3587    QCamera3HardwareInterface *hw =
3588        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3589    if (!hw) {
3590        ALOGE("%s: NULL camera device", __func__);
3591        return -ENODEV;
3592    }
3593    int rc = hw->registerStreamBuffers(buffer_set);
3594    ALOGV("%s: X", __func__);
3595    return rc;
3596}
3597
3598/*===========================================================================
3599 * FUNCTION   : construct_default_request_settings
3600 *
3601 * DESCRIPTION: Configure a settings buffer to meet the required use case
3602 *
3603 * PARAMETERS :
3604 *
3605 *
3606 * RETURN     : Success: Return valid metadata
3607 *              Failure: Return NULL
3608 *==========================================================================*/
3609const camera_metadata_t* QCamera3HardwareInterface::
3610    construct_default_request_settings(const struct camera3_device *device,
3611                                        int type)
3612{
3613
3614    ALOGV("%s: E", __func__);
3615    camera_metadata_t* fwk_metadata = NULL;
3616    QCamera3HardwareInterface *hw =
3617        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3618    if (!hw) {
3619        ALOGE("%s: NULL camera device", __func__);
3620        return NULL;
3621    }
3622
3623    fwk_metadata = hw->translateCapabilityToMetadata(type);
3624
3625    ALOGV("%s: X", __func__);
3626    return fwk_metadata;
3627}
3628
3629/*===========================================================================
3630 * FUNCTION   : process_capture_request
3631 *
3632 * DESCRIPTION: Static entry point; forwards a framework capture request
3632 *              to the HAL instance's processCaptureRequest().
3633 *
3634 * PARAMETERS :
3635 *
3636 *
3637 * RETURN     :
3638 *==========================================================================*/
3639int QCamera3HardwareInterface::process_capture_request(
3640                    const struct camera3_device *device,
3641                    camera3_capture_request_t *request)
3642{
3643    ALOGV("%s: E", __func__);
3644    QCamera3HardwareInterface *hw =
3645        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3646    if (!hw) {
3647        ALOGE("%s: NULL camera device", __func__);
3648        return -EINVAL;
3649    }
3650
3651    int rc = hw->processCaptureRequest(request);
3652    ALOGV("%s: X", __func__);
3653    return rc;
3654}
3655
3656/*===========================================================================
3657 * FUNCTION   : get_metadata_vendor_tag_ops
3658 *
3659 * DESCRIPTION:
3660 *
3661 * PARAMETERS :
3662 *
3663 *
3664 * RETURN     :
3665 *==========================================================================*/
3666
3667void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3668                const struct camera3_device *device,
3669                vendor_tag_query_ops_t* ops)
3670{
3671    ALOGV("%s: E", __func__);
3672    QCamera3HardwareInterface *hw =
3673        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3674    if (!hw) {
3675        ALOGE("%s: NULL camera device", __func__);
3676        return;
3677    }
3678
3679    hw->getMetadataVendorTagOps(ops);
3680    ALOGV("%s: X", __func__);
3681    return;
3682}
3683
3684/*===========================================================================
3685 * FUNCTION   : dump
3686 *
3687 * DESCRIPTION:
3688 *
3689 * PARAMETERS :
3690 *
3691 *
3692 * RETURN     :
3693 *==========================================================================*/
3694
3695void QCamera3HardwareInterface::dump(
3696                const struct camera3_device *device, int fd)
3697{
3698    ALOGV("%s: E", __func__);
3699    QCamera3HardwareInterface *hw =
3700        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3701    if (!hw) {
3702        ALOGE("%s: NULL camera device", __func__);
3703        return;
3704    }
3705
3706    hw->dump(fd);
3707    ALOGV("%s: X", __func__);
3708    return;
3709}
3710
3711/*===========================================================================
3712 * FUNCTION   : close_camera_device
3713 *
3714 * DESCRIPTION:
3715 *
3716 * PARAMETERS :
3717 *
3718 *
3719 * RETURN     :
3720 *==========================================================================*/
3721int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3722{
3723    ALOGV("%s: E", __func__);
3724    int ret = NO_ERROR;
3725    QCamera3HardwareInterface *hw =
3726        reinterpret_cast<QCamera3HardwareInterface *>(
3727            reinterpret_cast<camera3_device_t *>(device)->priv);
3728    if (!hw) {
3729        ALOGE("NULL camera device");
3730        return BAD_VALUE;
3731    }
3732    delete hw;
3733
3734    pthread_mutex_lock(&mCameraSessionLock);
3735    mCameraSessionActive = 0;
3736    pthread_mutex_unlock(&mCameraSessionLock);
3737    ALOGV("%s: X", __func__);
3738    return ret;
3739}
3740
3741/*===========================================================================
3742 * FUNCTION   : getWaveletDenoiseProcessPlate
3743 *
3744 * DESCRIPTION: query wavelet denoise process plate
3745 *
3746 * PARAMETERS : None
3747 *
3748 * RETURN     : WNR prcocess plate vlaue
3749 *==========================================================================*/
3750cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3751{
3752    char prop[PROPERTY_VALUE_MAX];
3753    memset(prop, 0, sizeof(prop));
3754    property_get("persist.denoise.process.plates", prop, "0");
3755    int processPlate = atoi(prop);
3756    switch(processPlate) {
3757    case 0:
3758        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3759    case 1:
3760        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3761    case 2:
3762        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3763    case 3:
3764        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3765    default:
3766        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3767    }
3768}
3769
3770/*===========================================================================
3771 * FUNCTION   : needRotationReprocess
3772 *
3773 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3774 *
3775 * PARAMETERS : none
3776 *
3777 * RETURN     : true: needed
3778 *              false: no need
3779 *==========================================================================*/
3780bool QCamera3HardwareInterface::needRotationReprocess()
3781{
3782
3783    if (!mJpegSettings->is_jpeg_format) {
3784        // RAW image, no need to reprocess
3785        return false;
3786    }
3787
3788    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3789        mJpegSettings->jpeg_orientation > 0) {
3790        // current rotation is not zero, and pp has the capability to process rotation
3791        ALOGD("%s: need do reprocess for rotation", __func__);
3792        return true;
3793    }
3794
3795    return false;
3796}
3797
3798/*===========================================================================
3799 * FUNCTION   : needReprocess
3800 *
3801 * DESCRIPTION: if reprocess in needed
3802 *
3803 * PARAMETERS : none
3804 *
3805 * RETURN     : true: needed
3806 *              false: no need
3807 *==========================================================================*/
3808bool QCamera3HardwareInterface::needReprocess()
3809{
3810    if (!mJpegSettings->is_jpeg_format) {
3811        // RAW image, no need to reprocess
3812        return false;
3813    }
3814
3815    if ((mJpegSettings->min_required_pp_mask > 0) ||
3816         isWNREnabled()) {
3817        // TODO: add for ZSL HDR later
3818        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3819        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3820        return true;
3821    }
3822    return needRotationReprocess();
3823}
3824
3825/*===========================================================================
3826 * FUNCTION   : addOnlineReprocChannel
3827 *
3828 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
3829 *              coming from input channel
3830 *
3831 * PARAMETERS :
3832 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3833 *
3834 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3835 *==========================================================================*/
3836QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3837              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3838{
3839    int32_t rc = NO_ERROR;
3840    QCamera3ReprocessChannel *pChannel = NULL;
3841    if (pInputChannel == NULL) {
3842        ALOGE("%s: input channel obj is NULL", __func__);
3843        return NULL;
3844    }
3845
3846    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3847            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
3848    if (NULL == pChannel) {
3849        ALOGE("%s: no mem for reprocess channel", __func__);
3850        return NULL;
3851    }
3852
3853    // Capture channel, only need snapshot and postview streams start together
3854    mm_camera_channel_attr_t attr;
3855    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
3856    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
3857    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
3858    rc = pChannel->initialize();
3859    if (rc != NO_ERROR) {
3860        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
3861        delete pChannel;
3862        return NULL;
3863    }
3864
3865    // pp feature config
3866    cam_pp_feature_config_t pp_config;
3867    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
3868    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
3869        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
3870        pp_config.sharpness = 10;
3871    }
3872
3873    if (isWNREnabled()) {
3874        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
3875        pp_config.denoise2d.denoise_enable = 1;
3876        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
3877    }
3878    if (needRotationReprocess()) {
3879        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
3880        int rotation = mJpegSettings->jpeg_orientation;
3881        if (rotation == 0) {
3882            pp_config.rotation = ROTATE_0;
3883        } else if (rotation == 90) {
3884            pp_config.rotation = ROTATE_90;
3885        } else if (rotation == 180) {
3886            pp_config.rotation = ROTATE_180;
3887        } else if (rotation == 270) {
3888            pp_config.rotation = ROTATE_270;
3889        }
3890    }
3891
3892   rc = pChannel->addReprocStreamsFromSource(pp_config,
3893                                             pInputChannel,
3894                                             mMetadataChannel);
3895
3896    if (rc != NO_ERROR) {
3897        delete pChannel;
3898        return NULL;
3899    }
3900    return pChannel;
3901}
3902
3903int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
3904{
3905    return gCamCapability[mCameraId]->min_num_pp_bufs;
3906}
3907
3908bool QCamera3HardwareInterface::isWNREnabled() {
3909    return gCamCapability[mCameraId]->isWnrSupported;
3910}
3911
3912}; //end namespace qcamera
3913