QCamera3HWI.cpp revision 2123c5d141214ae0412840fc9931726a33e4fdac
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand for fetching the INDEX-th buffer pointer from a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables; assumed populated before any
// QCamera3HardwareInterface is constructed (see ctor) — TODO confirm caller.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Last settings buffer applied to the backend.
parm_buffer_t *prevSettings;
// Cached static metadata blobs, one per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: only one camera session may be open at a time
// (see openCamera(struct hw_device_t **)).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;

// Framework ANDROID_CONTROL_EFFECT_MODE_* -> HAL cam_effect_mode_t.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Framework ANDROID_CONTROL_AWB_MODE_* -> HAL cam_wb_mode_type.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Framework ANDROID_CONTROL_SCENE_MODE_* -> HAL cam_scene_mode_type.
// Note STEADYPHOTO maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Framework ANDROID_CONTROL_AF_MODE_* -> HAL cam_focus_mode_type.
// AF_MODE_OFF is mapped to FIXED focus rather than a dedicated "off" mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Framework ANDROID_CONTROL_AE_ANTIBANDING_MODE_* -> HAL antibanding mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> flash behavior. Both plain ON and OFF disable the flash;
// REDEYE is treated the same as plain auto-flash by this HAL.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Framework ANDROID_FLASH_MODE_* -> HAL cam_flash_mode_t.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Framework face-detect mode -> HAL face-detect mode (SIMPLE not supported).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Supported JPEG thumbnail sizes — presumably flat (width, height) pairs with
// a terminating (0, 0) entry; verify against the static-metadata consumer.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};

// camera3_device_ops vtable handed to the framework; each entry is a static
// trampoline that recovers the instance from camera3_device_t::priv.
// (GNU designated-initializer syntax, matching the rest of this HAL.)
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};
149
150
151/*===========================================================================
152 * FUNCTION   : QCamera3HardwareInterface
153 *
154 * DESCRIPTION: constructor of QCamera3HardwareInterface
155 *
156 * PARAMETERS :
157 *   @cameraId  : camera ID
158 *
159 * RETURN     : none
160 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the camera3_device_t that openCamera() hands back to the
    // framework; priv lets the static ops trampolines recover `this`.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): assumes gCamCapability[cameraId] was already populated
    // (non-NULL) before construction — confirm with the module's open path.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request bookkeeping shared with the capture-result path.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; NULL marks "not built yet".
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Best-effort: power hints are optional, so a missing module is only logged.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
204
205/*===========================================================================
206 * FUNCTION   : ~QCamera3HardwareInterface
207 *
208 * DESCRIPTION: destructor of QCamera3HardwareInterface
209 *
210 * PARAMETERS : none
211 *
212 * RETURN     : none
213 *==========================================================================*/
214QCamera3HardwareInterface::~QCamera3HardwareInterface()
215{
216    ALOGV("%s: E", __func__);
217    /* We need to stop all streams before deleting any stream */
218        /*flush the metadata list*/
219    if (!mStoredMetadataList.empty()) {
220        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
221              m != mStoredMetadataList.end(); m++) {
222            mMetadataChannel->bufDone(m->meta_buf);
223            free(m->meta_buf);
224            m = mStoredMetadataList.erase(m);
225        }
226    }
227    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
228        it != mStreamInfo.end(); it++) {
229        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
230        if (channel)
231           channel->stop();
232    }
233    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
234        it != mStreamInfo.end(); it++) {
235        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
236        if (channel)
237            delete channel;
238        free (*it);
239    }
240
241    mPictureChannel = NULL;
242
243    if (mJpegSettings != NULL) {
244        free(mJpegSettings);
245        mJpegSettings = NULL;
246    }
247
248    /* Clean up all channels */
249    if (mCameraInitialized) {
250        mMetadataChannel->stop();
251        delete mMetadataChannel;
252        mMetadataChannel = NULL;
253        deinitParameters();
254    }
255
256    if (mCameraOpened)
257        closeCamera();
258
259    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
260        if (mDefaultMetadata[i])
261            free_camera_metadata(mDefaultMetadata[i]);
262
263    pthread_cond_destroy(&mRequestCond);
264
265    pthread_mutex_destroy(&mMutex);
266    ALOGV("%s: X", __func__);
267}
268
269/*===========================================================================
270 * FUNCTION   : openCamera
271 *
272 * DESCRIPTION: open camera
273 *
274 * PARAMETERS :
275 *   @hw_device  : double ptr for camera device struct
276 *
277 * RETURN     : int32_t type of status
278 *              NO_ERROR  -- success
279 *              none-zero failure code
280 *==========================================================================*/
281int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
282{
283    int rc = 0;
284    pthread_mutex_lock(&mCameraSessionLock);
285    if (mCameraSessionActive) {
286        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
287        pthread_mutex_unlock(&mCameraSessionLock);
288        return INVALID_OPERATION;
289    }
290
291    if (mCameraOpened) {
292        *hw_device = NULL;
293        return PERMISSION_DENIED;
294    }
295
296    rc = openCamera();
297    if (rc == 0) {
298        *hw_device = &mCameraDevice.common;
299        mCameraSessionActive = 1;
300    } else
301        *hw_device = NULL;
302
303#ifdef HAS_MULTIMEDIA_HINTS
304    if (rc == 0) {
305        if (m_pPowerModule) {
306            if (m_pPowerModule->powerHint) {
307                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
308                        (void *)"state=1");
309            }
310        }
311    }
312#endif
313    pthread_mutex_unlock(&mCameraSessionLock);
314    return rc;
315}
316
317/*===========================================================================
318 * FUNCTION   : openCamera
319 *
320 * DESCRIPTION: open camera
321 *
322 * PARAMETERS : none
323 *
324 * RETURN     : int32_t type of status
325 *              NO_ERROR  -- success
326 *              none-zero failure code
327 *==========================================================================*/
328int QCamera3HardwareInterface::openCamera()
329{
330    if (mCameraHandle) {
331        ALOGE("Failure: Camera already opened");
332        return ALREADY_EXISTS;
333    }
334    mCameraHandle = camera_open(mCameraId);
335    if (!mCameraHandle) {
336        ALOGE("camera_open failed.");
337        return UNKNOWN_ERROR;
338    }
339
340    mCameraOpened = true;
341
342    return NO_ERROR;
343}
344
345/*===========================================================================
346 * FUNCTION   : closeCamera
347 *
348 * DESCRIPTION: close camera
349 *
350 * PARAMETERS : none
351 *
352 * RETURN     : int32_t type of status
353 *              NO_ERROR  -- success
354 *              none-zero failure code
355 *==========================================================================*/
356int QCamera3HardwareInterface::closeCamera()
357{
358    int rc = NO_ERROR;
359
360    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
361    mCameraHandle = NULL;
362    mCameraOpened = false;
363
364#ifdef HAS_MULTIMEDIA_HINTS
365    if (rc == NO_ERROR) {
366        if (m_pPowerModule) {
367            if (m_pPowerModule->powerHint) {
368                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
369                        (void *)"state=0");
370            }
371        }
372    }
373#endif
374
375    return rc;
376}
377
378/*===========================================================================
379 * FUNCTION   : initialize
380 *
381 * DESCRIPTION: Initialize frameworks callback functions
382 *
383 * PARAMETERS :
384 *   @callback_ops : callback function to frameworks
385 *
386 * RETURN     :
387 *
388 *==========================================================================*/
389int QCamera3HardwareInterface::initialize(
390        const struct camera3_callback_ops *callback_ops)
391{
392    int rc;
393
394    pthread_mutex_lock(&mMutex);
395
396    rc = initParameters();
397    if (rc < 0) {
398        ALOGE("%s: initParamters failed %d", __func__, rc);
399       goto err1;
400    }
401    //Create metadata channel and initialize it
402    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
403                    mCameraHandle->ops, captureResultCb,
404                    &gCamCapability[mCameraId]->padding_info, this);
405    if (mMetadataChannel == NULL) {
406        ALOGE("%s: failed to allocate metadata channel", __func__);
407        rc = -ENOMEM;
408        goto err2;
409    }
410    rc = mMetadataChannel->initialize();
411    if (rc < 0) {
412        ALOGE("%s: metadata channel initialization failed", __func__);
413        goto err3;
414    }
415
416    mCallbackOps = callback_ops;
417
418    pthread_mutex_unlock(&mMutex);
419    mCameraInitialized = true;
420    return 0;
421
422err3:
423    delete mMetadataChannel;
424    mMetadataChannel = NULL;
425err2:
426    deinitParameters();
427err1:
428    pthread_mutex_unlock(&mMutex);
429    return rc;
430}
431
432/*===========================================================================
433 * FUNCTION   : configureStreams
434 *
435 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
436 *              and output streams.
437 *
438 * PARAMETERS :
439 *   @stream_list : streams to be configured
440 *
441 * RETURN     :
442 *
443 *==========================================================================*/
444int QCamera3HardwareInterface::configureStreams(
445        camera3_stream_configuration_t *streamList)
446{
447    int rc = 0;
448    mIsZslMode = false;
449    pthread_mutex_lock(&mMutex);
450    // Sanity check stream_list
451    if (streamList == NULL) {
452        ALOGE("%s: NULL stream configuration", __func__);
453        pthread_mutex_unlock(&mMutex);
454        return BAD_VALUE;
455    }
456
457    if (streamList->streams == NULL) {
458        ALOGE("%s: NULL stream list", __func__);
459        pthread_mutex_unlock(&mMutex);
460        return BAD_VALUE;
461    }
462
463    if (streamList->num_streams < 1) {
464        ALOGE("%s: Bad number of streams requested: %d", __func__,
465                streamList->num_streams);
466        pthread_mutex_unlock(&mMutex);
467        return BAD_VALUE;
468    }
469
470    camera3_stream_t *inputStream = NULL;
471    camera3_stream_t *jpegStream = NULL;
472    /* first invalidate all the steams in the mStreamList
473     * if they appear again, they will be validated */
474    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
475            it != mStreamInfo.end(); it++) {
476        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
477        channel->stop();
478        (*it)->status = INVALID;
479    }
480
481    for (size_t i = 0; i < streamList->num_streams; i++) {
482        camera3_stream_t *newStream = streamList->streams[i];
483        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
484                __func__, newStream->stream_type, newStream->format,
485                 newStream->width, newStream->height);
486        //if the stream is in the mStreamList validate it
487        bool stream_exists = false;
488        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
489                it != mStreamInfo.end(); it++) {
490            if ((*it)->stream == newStream) {
491                QCamera3Channel *channel =
492                    (QCamera3Channel*)(*it)->stream->priv;
493                stream_exists = true;
494                (*it)->status = RECONFIGURE;
495                /*delete the channel object associated with the stream because
496                  we need to reconfigure*/
497                delete channel;
498                (*it)->stream->priv = NULL;
499            }
500        }
501        if (!stream_exists) {
502            //new stream
503            stream_info_t* stream_info;
504            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
505            stream_info->stream = newStream;
506            stream_info->status = VALID;
507            stream_info->registered = 0;
508            mStreamInfo.push_back(stream_info);
509        }
510        if (newStream->stream_type == CAMERA3_STREAM_INPUT
511                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
512            if (inputStream != NULL) {
513                ALOGE("%s: Multiple input streams requested!", __func__);
514                pthread_mutex_unlock(&mMutex);
515                return BAD_VALUE;
516            }
517            inputStream = newStream;
518        }
519        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
520            jpegStream = newStream;
521        }
522    }
523    mInputStream = inputStream;
524
525    /*clean up invalid streams*/
526    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
527            it != mStreamInfo.end();) {
528        if(((*it)->status) == INVALID){
529            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
530            delete channel;
531            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
532            free(*it);
533            it = mStreamInfo.erase(it);
534        } else {
535            it++;
536        }
537    }
538
539    //mMetadataChannel->stop();
540
541    /* Allocate channel objects for the requested streams */
542    for (size_t i = 0; i < streamList->num_streams; i++) {
543        camera3_stream_t *newStream = streamList->streams[i];
544        if (newStream->priv == NULL) {
545            //New stream, construct channel
546            switch (newStream->stream_type) {
547            case CAMERA3_STREAM_INPUT:
548                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
549                break;
550            case CAMERA3_STREAM_BIDIRECTIONAL:
551                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
552                    GRALLOC_USAGE_HW_CAMERA_WRITE;
553                break;
554            case CAMERA3_STREAM_OUTPUT:
555                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
556                break;
557            default:
558                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
559                break;
560            }
561
562            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
563                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
564                QCamera3Channel *channel;
565                switch (newStream->format) {
566                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
567                case HAL_PIXEL_FORMAT_YCbCr_420_888:
568                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
569                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
570                        jpegStream) {
571                        uint32_t width = jpegStream->width;
572                        uint32_t height = jpegStream->height;
573                        mIsZslMode = true;
574                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
575                            mCameraHandle->ops, captureResultCb,
576                            &gCamCapability[mCameraId]->padding_info, this, newStream,
577                            width, height);
578                    } else
579                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
580                            mCameraHandle->ops, captureResultCb,
581                            &gCamCapability[mCameraId]->padding_info, this, newStream);
582                    if (channel == NULL) {
583                        ALOGE("%s: allocation of channel failed", __func__);
584                        pthread_mutex_unlock(&mMutex);
585                        return -ENOMEM;
586                    }
587
588                    newStream->priv = channel;
589                    break;
590                case HAL_PIXEL_FORMAT_BLOB:
591                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
592                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
593                            mCameraHandle->ops, captureResultCb,
594                            &gCamCapability[mCameraId]->padding_info, this, newStream);
595                    if (mPictureChannel == NULL) {
596                        ALOGE("%s: allocation of channel failed", __func__);
597                        pthread_mutex_unlock(&mMutex);
598                        return -ENOMEM;
599                    }
600                    newStream->priv = (QCamera3Channel*)mPictureChannel;
601                    break;
602
603                //TODO: Add support for app consumed format?
604                default:
605                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
606                    break;
607                }
608            }
609        } else {
610            // Channel already exists for this stream
611            // Do nothing for now
612        }
613    }
614    /*For the streams to be reconfigured we need to register the buffers
615      since the framework wont*/
616    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
617            it != mStreamInfo.end(); it++) {
618        if ((*it)->status == RECONFIGURE) {
619            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
620            /*only register buffers for streams that have already been
621              registered*/
622            if ((*it)->registered) {
623                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
624                        (*it)->buffer_set.buffers);
625                if (rc != NO_ERROR) {
626                    ALOGE("%s: Failed to register the buffers of old stream,\
627                            rc = %d", __func__, rc);
628                }
629                ALOGV("%s: channel %p has %d buffers",
630                        __func__, channel, (*it)->buffer_set.num_buffers);
631            }
632        }
633
634        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
635        if (index == NAME_NOT_FOUND) {
636            mPendingBuffersMap.add((*it)->stream, 0);
637        } else {
638            mPendingBuffersMap.editValueAt(index) = 0;
639        }
640    }
641
642    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
643    mPendingRequestsList.clear();
644
645    /*flush the metadata list*/
646    if (!mStoredMetadataList.empty()) {
647        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
648              m != mStoredMetadataList.end(); m++) {
649            mMetadataChannel->bufDone(m->meta_buf);
650            free(m->meta_buf);
651            m = mStoredMetadataList.erase(m);
652        }
653    }
654
655    //settings/parameters don't carry over for new configureStreams
656    memset(mParameters, 0, sizeof(parm_buffer_t));
657    mFirstRequest = true;
658
659    //Get min frame duration for this streams configuration
660    deriveMinFrameDuration();
661
662    pthread_mutex_unlock(&mMutex);
663    return rc;
664}
665
666/*===========================================================================
667 * FUNCTION   : validateCaptureRequest
668 *
669 * DESCRIPTION: validate a capture request from camera service
670 *
671 * PARAMETERS :
672 *   @request : request from framework to process
673 *
674 * RETURN     :
675 *
676 *==========================================================================*/
677int QCamera3HardwareInterface::validateCaptureRequest(
678                    camera3_capture_request_t *request)
679{
680    ssize_t idx = 0;
681    const camera3_stream_buffer_t *b;
682    CameraMetadata meta;
683
684    /* Sanity check the request */
685    if (request == NULL) {
686        ALOGE("%s: NULL capture request", __func__);
687        return BAD_VALUE;
688    }
689
690    uint32_t frameNumber = request->frame_number;
691    if (request->input_buffer != NULL &&
692            request->input_buffer->stream != mInputStream) {
693        ALOGE("%s: Request %d: Input buffer not from input stream!",
694                __FUNCTION__, frameNumber);
695        return BAD_VALUE;
696    }
697    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
698        ALOGE("%s: Request %d: No output buffers provided!",
699                __FUNCTION__, frameNumber);
700        return BAD_VALUE;
701    }
702    if (request->input_buffer != NULL) {
703        b = request->input_buffer;
704        QCamera3Channel *channel =
705            static_cast<QCamera3Channel*>(b->stream->priv);
706        if (channel == NULL) {
707            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
708                    __func__, frameNumber, idx);
709            return BAD_VALUE;
710        }
711        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
712            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
713                    __func__, frameNumber, idx);
714            return BAD_VALUE;
715        }
716        if (b->release_fence != -1) {
717            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
718                    __func__, frameNumber, idx);
719            return BAD_VALUE;
720        }
721        if (b->buffer == NULL) {
722            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
723                    __func__, frameNumber, idx);
724            return BAD_VALUE;
725        }
726    }
727
728    // Validate all buffers
729    b = request->output_buffers;
730    do {
731        QCamera3Channel *channel =
732                static_cast<QCamera3Channel*>(b->stream->priv);
733        if (channel == NULL) {
734            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
735                    __func__, frameNumber, idx);
736            return BAD_VALUE;
737        }
738        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
739            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
740                    __func__, frameNumber, idx);
741            return BAD_VALUE;
742        }
743        if (b->release_fence != -1) {
744            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
745                    __func__, frameNumber, idx);
746            return BAD_VALUE;
747        }
748        if (b->buffer == NULL) {
749            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
750                    __func__, frameNumber, idx);
751            return BAD_VALUE;
752        }
753        idx++;
754        b = request->output_buffers + idx;
755    } while (idx < (ssize_t)request->num_output_buffers);
756
757    return NO_ERROR;
758}
759
760/*===========================================================================
761 * FUNCTION   : deriveMinFrameDuration
762 *
763 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
764 *              on currently configured streams.
765 *
766 * PARAMETERS : NONE
767 *
768 * RETURN     : NONE
769 *
770 *==========================================================================*/
771void QCamera3HardwareInterface::deriveMinFrameDuration()
772{
773    int32_t maxJpegDimension, maxProcessedDimension;
774
775    maxJpegDimension = 0;
776    maxProcessedDimension = 0;
777
778    // Figure out maximum jpeg, processed, and raw dimensions
779    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
780        it != mStreamInfo.end(); it++) {
781
782        // Input stream doesn't have valid stream_type
783        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
784            continue;
785
786        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
787        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
788            if (dimension > maxJpegDimension)
789                maxJpegDimension = dimension;
790        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
791            if (dimension > maxProcessedDimension)
792                maxProcessedDimension = dimension;
793        }
794    }
795
796    //Assume all jpeg dimensions are in processed dimensions.
797    if (maxJpegDimension > maxProcessedDimension)
798        maxProcessedDimension = maxJpegDimension;
799
800    //Find minimum durations for processed, jpeg, and raw
801    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
802    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
803        if (maxProcessedDimension ==
804            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
805            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
806            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
807            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
808            break;
809        }
810    }
811}
812
813/*===========================================================================
814 * FUNCTION   : getMinFrameDuration
815 *
816 * DESCRIPTION: get minimum frame duration based on the current minimum frame durations
817 *              and current request configuration.
818 *
819 * PARAMETERS : @request: request sent by the frameworks
820 *
821 * RETURN     : min frame duration for a particular request
822 *
823 *==========================================================================*/
824int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
825{
826    bool hasJpegStream = false;
827    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
828        const camera3_stream_t *stream = request->output_buffers[i].stream;
829        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
830            hasJpegStream = true;
831    }
832
833    if (!hasJpegStream)
834        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
835    else
836        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
837}
838
839/*===========================================================================
840 * FUNCTION   : registerStreamBuffers
841 *
842 * DESCRIPTION: Register buffers for a given stream with the HAL device.
843 *
844 * PARAMETERS :
845 *   @stream_list : streams to be configured
846 *
847 * RETURN     :
848 *
849 *==========================================================================*/
850int QCamera3HardwareInterface::registerStreamBuffers(
851        const camera3_stream_buffer_set_t *buffer_set)
852{
853    int rc = 0;
854
855    pthread_mutex_lock(&mMutex);
856
857    if (buffer_set == NULL) {
858        ALOGE("%s: Invalid buffer_set parameter.", __func__);
859        pthread_mutex_unlock(&mMutex);
860        return -EINVAL;
861    }
862    if (buffer_set->stream == NULL) {
863        ALOGE("%s: Invalid stream parameter.", __func__);
864        pthread_mutex_unlock(&mMutex);
865        return -EINVAL;
866    }
867    if (buffer_set->num_buffers < 1) {
868        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
869        pthread_mutex_unlock(&mMutex);
870        return -EINVAL;
871    }
872    if (buffer_set->buffers == NULL) {
873        ALOGE("%s: Invalid buffers parameter.", __func__);
874        pthread_mutex_unlock(&mMutex);
875        return -EINVAL;
876    }
877
878    camera3_stream_t *stream = buffer_set->stream;
879    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
880
881    //set the buffer_set in the mStreamInfo array
882    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
883            it != mStreamInfo.end(); it++) {
884        if ((*it)->stream == stream) {
885            uint32_t numBuffers = buffer_set->num_buffers;
886            (*it)->buffer_set.stream = buffer_set->stream;
887            (*it)->buffer_set.num_buffers = numBuffers;
888            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
889            if ((*it)->buffer_set.buffers == NULL) {
890                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
891                pthread_mutex_unlock(&mMutex);
892                return -ENOMEM;
893            }
894            for (size_t j = 0; j < numBuffers; j++){
895                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
896            }
897            (*it)->registered = 1;
898        }
899    }
900    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
901    if (rc < 0) {
902        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
903        pthread_mutex_unlock(&mMutex);
904        return -ENODEV;
905    }
906
907    pthread_mutex_unlock(&mMutex);
908    return NO_ERROR;
909}
910
911/*===========================================================================
912 * FUNCTION   : processCaptureRequest
913 *
914 * DESCRIPTION: process a capture request from camera service
915 *
916 * PARAMETERS :
917 *   @request : request from framework to process
918 *
919 * RETURN     :
920 *
921 *==========================================================================*/
922int QCamera3HardwareInterface::processCaptureRequest(
923                    camera3_capture_request_t *request)
924{
925    int rc = NO_ERROR;
926    int32_t request_id;
927    CameraMetadata meta;
928    MetadataBufferInfo reproc_meta;
929    int queueMetadata = 0;
930
931    pthread_mutex_lock(&mMutex);
932
933    rc = validateCaptureRequest(request);
934    if (rc != NO_ERROR) {
935        ALOGE("%s: incoming request is not valid", __func__);
936        pthread_mutex_unlock(&mMutex);
937        return rc;
938    }
939
940    meta = request->settings;
941
942    // For first capture request, send capture intent, and
943    // stream on all streams
944    if (mFirstRequest) {
945
946        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
947            int32_t hal_version = CAM_HAL_V3;
948            uint8_t captureIntent =
949                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
950
951            memset(mParameters, 0, sizeof(parm_buffer_t));
952            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
953            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
954                sizeof(hal_version), &hal_version);
955            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
956                sizeof(captureIntent), &captureIntent);
957            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
958                mParameters);
959        }
960
961        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
962            it != mStreamInfo.end(); it++) {
963            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
964            channel->start();
965        }
966    }
967
968    uint32_t frameNumber = request->frame_number;
969    uint32_t streamTypeMask = 0;
970
971    if (meta.exists(ANDROID_REQUEST_ID)) {
972        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
973        mCurrentRequestId = request_id;
974        ALOGV("%s: Received request with id: %d",__func__, request_id);
975    } else if (mFirstRequest || mCurrentRequestId == -1){
976        ALOGE("%s: Unable to find request id field, \
977                & no previous id available", __func__);
978        return NAME_NOT_FOUND;
979    } else {
980        ALOGV("%s: Re-using old request id", __func__);
981        request_id = mCurrentRequestId;
982    }
983
984    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
985                                    __func__, __LINE__,
986                                    request->num_output_buffers,
987                                    request->input_buffer,
988                                    frameNumber);
989    // Acquire all request buffers first
990    int blob_request = 0;
991    for (size_t i = 0; i < request->num_output_buffers; i++) {
992        const camera3_stream_buffer_t& output = request->output_buffers[i];
993        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
994        sp<Fence> acquireFence = new Fence(output.acquire_fence);
995
996        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
997        //Call function to store local copy of jpeg data for encode params.
998            blob_request = 1;
999            rc = getJpegSettings(request->settings);
1000            if (rc < 0) {
1001                ALOGE("%s: failed to get jpeg parameters", __func__);
1002                pthread_mutex_unlock(&mMutex);
1003                return rc;
1004            }
1005        }
1006
1007        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1008        if (rc != OK) {
1009            ALOGE("%s: fence wait failed %d", __func__, rc);
1010            pthread_mutex_unlock(&mMutex);
1011            return rc;
1012        }
1013        streamTypeMask |= channel->getStreamTypeMask();
1014    }
1015
1016    rc = setFrameParameters(request, streamTypeMask);
1017    if (rc < 0) {
1018        ALOGE("%s: fail to set frame parameters", __func__);
1019        pthread_mutex_unlock(&mMutex);
1020        return rc;
1021    }
1022
1023    /* Update pending request list and pending buffers map */
1024    PendingRequestInfo pendingRequest;
1025    pendingRequest.frame_number = frameNumber;
1026    pendingRequest.num_buffers = request->num_output_buffers;
1027    pendingRequest.request_id = request_id;
1028    pendingRequest.blob_request = blob_request;
1029    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1030
1031    for (size_t i = 0; i < request->num_output_buffers; i++) {
1032        RequestedBufferInfo requestedBuf;
1033        requestedBuf.stream = request->output_buffers[i].stream;
1034        requestedBuf.buffer = NULL;
1035        pendingRequest.buffers.push_back(requestedBuf);
1036
1037        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1038    }
1039    mPendingRequestsList.push_back(pendingRequest);
1040
1041    // Notify metadata channel we receive a request
1042    mMetadataChannel->request(NULL, frameNumber);
1043
1044    // Call request on other streams
1045    for (size_t i = 0; i < request->num_output_buffers; i++) {
1046        const camera3_stream_buffer_t& output = request->output_buffers[i];
1047        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1048        mm_camera_buf_def_t *pInputBuffer = NULL;
1049
1050        if (channel == NULL) {
1051            ALOGE("%s: invalid channel pointer for stream", __func__);
1052            continue;
1053        }
1054
1055        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1056            QCamera3RegularChannel* inputChannel = NULL;
1057            if(request->input_buffer != NULL){
1058                //Try to get the internal format
1059                inputChannel = (QCamera3RegularChannel*)
1060                    request->input_buffer->stream->priv;
1061                if(inputChannel == NULL ){
1062                    ALOGE("%s: failed to get input channel handle", __func__);
1063                } else {
1064                    pInputBuffer =
1065                        inputChannel->getInternalFormatBuffer(
1066                                request->input_buffer->buffer);
1067                    ALOGD("%s: Input buffer dump",__func__);
1068                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1069                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1070                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1071                    //TODO: need to get corresponding metadata and send it to pproc
1072                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1073                         m != mStoredMetadataList.end(); m++) {
1074                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1075                            reproc_meta.meta_buf = m->meta_buf;
1076                            m = mStoredMetadataList.erase(m);
1077                            queueMetadata = 1;
1078                            break;
1079                        }
1080                    }
1081                }
1082            }
1083            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1084                            pInputBuffer,(QCamera3Channel*)inputChannel);
1085            if (queueMetadata) {
1086                mPictureChannel->queueMetadata(reproc_meta.meta_buf);
1087            }
1088        } else {
1089            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1090                __LINE__, output.buffer, frameNumber);
1091            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1092                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1093                     m != mStoredMetadataList.end(); m++) {
1094                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1095                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1096                            mMetadataChannel->bufDone(m->meta_buf);
1097                            free(m->meta_buf);
1098                            m = mStoredMetadataList.erase(m);
1099                            break;
1100                        }
1101                   }
1102                }
1103            }
1104            rc = channel->request(output.buffer, frameNumber);
1105        }
1106        if (rc < 0)
1107            ALOGE("%s: request failed", __func__);
1108    }
1109
1110    mFirstRequest = false;
1111
1112    //Block on conditional variable
1113    mPendingRequest = 1;
1114    while (mPendingRequest == 1) {
1115        pthread_cond_wait(&mRequestCond, &mMutex);
1116    }
1117
1118    pthread_mutex_unlock(&mMutex);
1119    return rc;
1120}
1121
1122/*===========================================================================
1123 * FUNCTION   : getMetadataVendorTagOps
1124 *
1125 * DESCRIPTION:
1126 *
1127 * PARAMETERS :
1128 *
1129 *
1130 * RETURN     :
1131 *==========================================================================*/
1132void QCamera3HardwareInterface::getMetadataVendorTagOps(
1133                    vendor_tag_query_ops_t* /*ops*/)
1134{
1135    /* Enable locks when we eventually add Vendor Tags */
1136    /*
1137    pthread_mutex_lock(&mMutex);
1138
1139    pthread_mutex_unlock(&mMutex);
1140    */
1141    return;
1142}
1143
1144/*===========================================================================
1145 * FUNCTION   : dump
1146 *
1147 * DESCRIPTION:
1148 *
1149 * PARAMETERS :
1150 *
1151 *
1152 * RETURN     :
1153 *==========================================================================*/
1154void QCamera3HardwareInterface::dump(int /*fd*/)
1155{
1156    /*Enable lock when we implement this function*/
1157    /*
1158    pthread_mutex_lock(&mMutex);
1159
1160    pthread_mutex_unlock(&mMutex);
1161    */
1162    return;
1163}
1164
1165
1166/*===========================================================================
1167 * FUNCTION   : captureResultCb
1168 *
1169 * DESCRIPTION: Callback handler for all capture result
1170 *              (streams, as well as metadata)
1171 *
1172 * PARAMETERS :
1173 *   @metadata : metadata information
1174 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1175 *               NULL if metadata.
1176 *
1177 * RETURN     : NONE
1178 *==========================================================================*/
1179void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1180                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1181{
1182    pthread_mutex_lock(&mMutex);
1183
1184    if (metadata_buf) {
1185        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1186        int32_t frame_number_valid = *(int32_t *)
1187            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1188        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1189            CAM_INTF_META_PENDING_REQUESTS, metadata);
1190        uint32_t frame_number = *(uint32_t *)
1191            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1192        const struct timeval *tv = (const struct timeval *)
1193            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1194        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1195            tv->tv_usec * NSEC_PER_USEC;
1196
1197        if (!frame_number_valid) {
1198            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1199            mMetadataChannel->bufDone(metadata_buf);
1200            goto done_metadata;
1201        }
1202        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1203                frame_number, capture_time);
1204
1205        // Go through the pending requests info and send shutter/results to frameworks
1206        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1207                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1208            camera3_capture_result_t result;
1209            camera3_notify_msg_t notify_msg;
1210            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1211
1212            // Flush out all entries with less or equal frame numbers.
1213
1214            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1215            //Right now it's the same as metadata timestamp
1216
1217            //TODO: When there is metadata drop, how do we derive the timestamp of
1218            //dropped frames? For now, we fake the dropped timestamp by substracting
1219            //from the reported timestamp
1220            nsecs_t current_capture_time = capture_time -
1221                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1222
1223            // Send shutter notify to frameworks
1224            notify_msg.type = CAMERA3_MSG_SHUTTER;
1225            notify_msg.message.shutter.frame_number = i->frame_number;
1226            notify_msg.message.shutter.timestamp = current_capture_time;
1227            mCallbackOps->notify(mCallbackOps, &notify_msg);
1228            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1229                    i->frame_number, capture_time);
1230
1231            // Send empty metadata with already filled buffers for dropped metadata
1232            // and send valid metadata with already filled buffers for current metadata
1233            if (i->frame_number < frame_number) {
1234                CameraMetadata dummyMetadata;
1235                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1236                        &current_capture_time, 1);
1237                dummyMetadata.update(ANDROID_REQUEST_ID,
1238                        &(i->request_id), 1);
1239                result.result = dummyMetadata.release();
1240            } else {
1241                result.result = translateCbMetadataToResultMetadata(metadata,
1242                        current_capture_time, i->request_id);
1243                if (mIsZslMode) {
1244                   int found_metadata = 0;
1245                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1246                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1247                        j != i->buffers.end(); j++) {
1248                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1249                         //check if corresp. zsl already exists in the stored metadata list
1250                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1251                               m != mStoredMetadataList.begin(); m++) {
1252                            if (m->frame_number == frame_number) {
1253                               m->meta_buf = metadata_buf;
1254                               found_metadata = 1;
1255                               break;
1256                            }
1257                         }
1258                         if (!found_metadata) {
1259                            MetadataBufferInfo store_meta_info;
1260                            store_meta_info.meta_buf = metadata_buf;
1261                            store_meta_info.frame_number = frame_number;
1262                            mStoredMetadataList.push_back(store_meta_info);
1263                            found_metadata = 1;
1264                         }
1265                      }
1266                   }
1267                   if (!found_metadata) {
1268                       if (!i->input_buffer_present && i->blob_request) {
1269                          //livesnapshot or fallback non-zsl snapshot case
1270                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1271                                j != i->buffers.end(); j++){
1272                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1273                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1274                                 mPictureChannel->queueMetadata(metadata_buf);
1275                                 break;
1276                              }
1277                         }
1278                       } else {
1279                            //return the metadata immediately
1280                            mMetadataChannel->bufDone(metadata_buf);
1281                            free(metadata_buf);
1282                       }
1283                   }
1284               } else if (!mIsZslMode && i->blob_request) {
1285                   //If it is a blob request then send the metadata to the picture channel
1286                   mPictureChannel->queueMetadata(metadata_buf);
1287               } else {
1288                   // Return metadata buffer
1289                   mMetadataChannel->bufDone(metadata_buf);
1290                   free(metadata_buf);
1291               }
1292
1293            }
1294            if (!result.result) {
1295                ALOGE("%s: metadata is NULL", __func__);
1296            }
1297            result.frame_number = i->frame_number;
1298            result.num_output_buffers = 0;
1299            result.output_buffers = NULL;
1300            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1301                    j != i->buffers.end(); j++) {
1302                if (j->buffer) {
1303                    result.num_output_buffers++;
1304                }
1305            }
1306
1307            if (result.num_output_buffers > 0) {
1308                camera3_stream_buffer_t *result_buffers =
1309                    new camera3_stream_buffer_t[result.num_output_buffers];
1310                if (!result_buffers) {
1311                    ALOGE("%s: Fatal error: out of memory", __func__);
1312                }
1313                size_t result_buffers_idx = 0;
1314                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1315                        j != i->buffers.end(); j++) {
1316                    if (j->buffer) {
1317                        result_buffers[result_buffers_idx++] = *(j->buffer);
1318                        free(j->buffer);
1319                        j->buffer = NULL;
1320                        mPendingBuffersMap.editValueFor(j->stream)--;
1321                    }
1322                }
1323                result.output_buffers = result_buffers;
1324
1325                mCallbackOps->process_capture_result(mCallbackOps, &result);
1326                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1327                        __func__, result.frame_number, current_capture_time);
1328                free_camera_metadata((camera_metadata_t *)result.result);
1329                delete[] result_buffers;
1330            } else {
1331                mCallbackOps->process_capture_result(mCallbackOps, &result);
1332                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1333                        __func__, result.frame_number, current_capture_time);
1334                free_camera_metadata((camera_metadata_t *)result.result);
1335            }
1336            // erase the element from the list
1337            i = mPendingRequestsList.erase(i);
1338        }
1339
1340
1341done_metadata:
1342        bool max_buffers_dequeued = false;
1343        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1344            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1345            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1346            if (queued_buffers == stream->max_buffers) {
1347                max_buffers_dequeued = true;
1348                break;
1349            }
1350        }
1351        if (!max_buffers_dequeued && !pending_requests) {
1352            // Unblock process_capture_request
1353            mPendingRequest = 0;
1354            pthread_cond_signal(&mRequestCond);
1355        }
1356    } else {
1357        // If the frame number doesn't exist in the pending request list,
1358        // directly send the buffer to the frameworks, and update pending buffers map
1359        // Otherwise, book-keep the buffer.
1360        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1361        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1362            i++;
1363        }
1364        if (i == mPendingRequestsList.end()) {
1365            // Verify all pending requests frame_numbers are greater
1366            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1367                    j != mPendingRequestsList.end(); j++) {
1368                if (j->frame_number < frame_number) {
1369                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1370                            __func__, j->frame_number, frame_number);
1371                }
1372            }
1373            camera3_capture_result_t result;
1374            result.result = NULL;
1375            result.frame_number = frame_number;
1376            result.num_output_buffers = 1;
1377            result.output_buffers = buffer;
1378            ALOGV("%s: result frame_number = %d, buffer = %p",
1379                    __func__, frame_number, buffer);
1380            mPendingBuffersMap.editValueFor(buffer->stream)--;
1381            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1382                int found = 0;
1383                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1384                      k != mStoredMetadataList.end(); k++) {
1385                    if (k->frame_number == frame_number) {
1386                        k->zsl_buf_hdl = buffer->buffer;
1387                        found = 1;
1388                        break;
1389                    }
1390                }
1391                if (!found) {
1392                   MetadataBufferInfo meta_info;
1393                   meta_info.frame_number = frame_number;
1394                   meta_info.zsl_buf_hdl = buffer->buffer;
1395                   mStoredMetadataList.push_back(meta_info);
1396                }
1397            }
1398            mCallbackOps->process_capture_result(mCallbackOps, &result);
1399        } else {
1400            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1401                    j != i->buffers.end(); j++) {
1402                if (j->stream == buffer->stream) {
1403                    if (j->buffer != NULL) {
1404                        ALOGE("%s: Error: buffer is already set", __func__);
1405                    } else {
1406                        j->buffer = (camera3_stream_buffer_t *)malloc(
1407                                sizeof(camera3_stream_buffer_t));
1408                        *(j->buffer) = *buffer;
1409                        ALOGV("%s: cache buffer %p at result frame_number %d",
1410                                __func__, buffer, frame_number);
1411                    }
1412                }
1413            }
1414        }
1415    }
1416    pthread_mutex_unlock(&mMutex);
1417    return;
1418}
1419
1420/*===========================================================================
1421 * FUNCTION   : translateCbMetadataToResultMetadata
1422 *
1423 * DESCRIPTION:
1424 *
1425 * PARAMETERS :
1426 *   @metadata : metadata information from callback
1427 *
1428 * RETURN     : camera_metadata_t*
1429 *              metadata in a format specified by fwk
1430 *==========================================================================*/
1431camera_metadata_t*
1432QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1433                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1434                                 int32_t request_id)
1435{
1436    CameraMetadata camMetadata;
1437    camera_metadata_t* resultMetadata;
1438
1439    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1440    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1441
1442    /*CAM_INTF_META_HISTOGRAM - TODO*/
1443    /*cam_hist_stats_t  *histogram =
1444      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1445      metadata);*/
1446
1447    /*face detection*/
1448    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1449        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1450    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1451    int32_t faceIds[numFaces];
1452    uint8_t faceScores[numFaces];
1453    int32_t faceRectangles[numFaces * 4];
1454    int32_t faceLandmarks[numFaces * 6];
1455    int j = 0, k = 0;
1456    for (int i = 0; i < numFaces; i++) {
1457        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1458        faceScores[i] = faceDetectionInfo->faces[i].score;
1459        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1460                faceRectangles+j, -1);
1461        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1462        j+= 4;
1463        k+= 6;
1464    }
1465    if (numFaces > 0) {
1466        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1467        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1468        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1469            faceRectangles, numFaces*4);
1470        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1471            faceLandmarks, numFaces*6);
1472    }
1473
1474    uint8_t  *color_correct_mode =
1475        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1476    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1477
1478    int32_t  *ae_precapture_id =
1479        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1480    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1481
1482    /*aec regions*/
1483    cam_area_t  *hAeRegions =
1484        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1485    int32_t aeRegions[5];
1486    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1487    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1488
1489    uint8_t *ae_state =
1490            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1491    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1492
1493    uint8_t  *focusMode =
1494        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1495    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1496
1497    /*af regions*/
1498    cam_area_t  *hAfRegions =
1499        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1500    int32_t afRegions[5];
1501    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1502    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1503
1504    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1505    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1506
1507    int32_t  *afTriggerId =
1508        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1509    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1510
1511    uint8_t  *whiteBalance =
1512        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1513    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1514
1515    /*awb regions*/
1516    cam_area_t  *hAwbRegions =
1517        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1518    int32_t awbRegions[5];
1519    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1520    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1521
1522    uint8_t  *whiteBalanceState =
1523        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1524    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1525
1526    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1527    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1528
1529    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1530    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1531
1532    uint8_t  *flashPower =
1533        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1534    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1535
1536    int64_t  *flashFiringTime =
1537        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1538    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1539
1540    /*int32_t  *ledMode =
1541      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1542      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1543
1544    uint8_t  *flashState =
1545        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1546    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1547
1548    uint8_t  *hotPixelMode =
1549        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1550    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1551
1552    float  *lensAperture =
1553        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1554    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1555
1556    float  *filterDensity =
1557        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1558    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1559
1560    float  *focalLength =
1561        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1562    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1563
1564    float  *focusDistance =
1565        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1566    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1567
1568    float  *focusRange =
1569        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1570    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1571
1572    uint8_t  *opticalStab =
1573        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1574    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1575
1576    /*int32_t  *focusState =
1577      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1578      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1579
1580    uint8_t  *noiseRedMode =
1581        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1582    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1583
1584    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1585
1586    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1587        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1588    int32_t scalerCropRegion[4];
1589    scalerCropRegion[0] = hScalerCropRegion->left;
1590    scalerCropRegion[1] = hScalerCropRegion->top;
1591    scalerCropRegion[2] = hScalerCropRegion->width;
1592    scalerCropRegion[3] = hScalerCropRegion->height;
1593    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1594
1595    int64_t  *sensorExpTime =
1596        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1597    mMetadataResponse.exposure_time = *sensorExpTime;
1598    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1599    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1600
1601    int64_t  *sensorFameDuration =
1602        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1603    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1604    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1605
1606    int32_t  *sensorSensitivity =
1607        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1608    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1609    mMetadataResponse.iso_speed = *sensorSensitivity;
1610    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1611
1612    uint8_t  *shadingMode =
1613        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1614    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1615
1616    uint8_t  *faceDetectMode =
1617        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1618    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1619        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1620        *faceDetectMode);
1621    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1622
1623    uint8_t  *histogramMode =
1624        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1625    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1626
1627    uint8_t  *sharpnessMapMode =
1628        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1629    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1630            sharpnessMapMode, 1);
1631
1632    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1633    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1634        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1635    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1636            (int32_t*)sharpnessMap->sharpness,
1637            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1638
1639    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1640        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1641    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1642    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1643    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1644                       (float*)lensShadingMap->lens_shading,
1645                       4*map_width*map_height);
1646
1647    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1648        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1649    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1650
1651    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1652        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1653    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1654                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1655
1656    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1657        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1658    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1659                       predColorCorrectionGains->gains, 4);
1660
1661    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1662        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1663    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1664                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1665
1666    uint8_t *blackLevelLock = (uint8_t*)
1667        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1668    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1669
1670    uint8_t *sceneFlicker = (uint8_t*)
1671        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1672    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1673
1674
1675    resultMetadata = camMetadata.release();
1676    return resultMetadata;
1677}
1678
1679/*===========================================================================
1680 * FUNCTION   : convertToRegions
1681 *
1682 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1683 *
1684 * PARAMETERS :
1685 *   @rect   : cam_rect_t struct to convert
1686 *   @region : int32_t destination array
1687 *   @weight : if we are converting from cam_area_t, weight is valid
1688 *             else weight = -1
1689 *
1690 *==========================================================================*/
1691void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1692    region[0] = rect.left;
1693    region[1] = rect.top;
1694    region[2] = rect.left + rect.width;
1695    region[3] = rect.top + rect.height;
1696    if (weight > -1) {
1697        region[4] = weight;
1698    }
1699}
1700
1701/*===========================================================================
1702 * FUNCTION   : convertFromRegions
1703 *
1704 * DESCRIPTION: helper method to convert from array to cam_rect_t
1705 *
1706 * PARAMETERS :
 *   @roi      : cam_area_t destination filled from the metadata entry
 *   @settings : capture request settings to read the region from
 *   @tag      : metadata tag of the region entry, laid out as
 *               [xmin, ymin, xmax, ymax, weight]
1711 *
1712 *==========================================================================*/
1713void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1714                                                   const camera_metadata_t *settings,
1715                                                   uint32_t tag){
1716    CameraMetadata frame_settings;
1717    frame_settings = settings;
1718    int32_t x_min = frame_settings.find(tag).data.i32[0];
1719    int32_t y_min = frame_settings.find(tag).data.i32[1];
1720    int32_t x_max = frame_settings.find(tag).data.i32[2];
1721    int32_t y_max = frame_settings.find(tag).data.i32[3];
1722    roi->weight = frame_settings.find(tag).data.i32[4];
1723    roi->rect.left = x_min;
1724    roi->rect.top = y_min;
1725    roi->rect.width = x_max - x_min;
1726    roi->rect.height = y_max - y_min;
1727}
1728
1729/*===========================================================================
1730 * FUNCTION   : resetIfNeededROI
1731 *
1732 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1733 *              crop region
1734 *
1735 * PARAMETERS :
1736 *   @roi       : cam_area_t struct to resize
1737 *   @scalerCropRegion : cam_crop_region_t region to compare against
1738 *
1739 *
1740 *==========================================================================*/
1741bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1742                                                 const cam_crop_region_t* scalerCropRegion)
1743{
1744    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1745    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1746    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1747    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1748    if ((roi_x_max < scalerCropRegion->left) ||
1749        (roi_y_max < scalerCropRegion->top)  ||
1750        (roi->rect.left > crop_x_max) ||
1751        (roi->rect.top > crop_y_max)){
1752        return false;
1753    }
1754    if (roi->rect.left < scalerCropRegion->left) {
1755        roi->rect.left = scalerCropRegion->left;
1756    }
1757    if (roi->rect.top < scalerCropRegion->top) {
1758        roi->rect.top = scalerCropRegion->top;
1759    }
1760    if (roi_x_max > crop_x_max) {
1761        roi_x_max = crop_x_max;
1762    }
1763    if (roi_y_max > crop_y_max) {
1764        roi_y_max = crop_y_max;
1765    }
1766    roi->rect.width = roi_x_max - roi->rect.left;
1767    roi->rect.height = roi_y_max - roi->rect.top;
1768    return true;
1769}
1770
1771/*===========================================================================
1772 * FUNCTION   : convertLandmarks
1773 *
1774 * DESCRIPTION: helper method to extract the landmarks from face detection info
1775 *
1776 * PARAMETERS :
 *   @face   : cam_face_detection_info_t struct to read landmarks from
1778 *   @landmarks : int32_t destination array
1779 *
1780 *
1781 *==========================================================================*/
1782void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1783{
1784    landmarks[0] = face.left_eye_center.x;
1785    landmarks[1] = face.left_eye_center.y;
1786    landmarks[2] = face.right_eye_center.y;
1787    landmarks[3] = face.right_eye_center.y;
1788    landmarks[4] = face.mouth_center.x;
1789    landmarks[5] = face.mouth_center.y;
1790}
1791
1792#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1793/*===========================================================================
1794 * FUNCTION   : initCapabilities
1795 *
1796 * DESCRIPTION: initialize camera capabilities in static data struct
1797 *
1798 * PARAMETERS :
1799 *   @cameraId  : camera Id
1800 *
1801 * RETURN     : int32_t type of status
1802 *              NO_ERROR  -- success
1803 *              none-zero failure code
1804 *==========================================================================*/
1805int QCamera3HardwareInterface::initCapabilities(int cameraId)
1806{
1807    int rc = 0;
1808    mm_camera_vtbl_t *cameraHandle = NULL;
1809    QCamera3HeapMemory *capabilityHeap = NULL;
1810
1811    cameraHandle = camera_open(cameraId);
1812    if (!cameraHandle) {
1813        ALOGE("%s: camera_open failed", __func__);
1814        rc = -1;
1815        goto open_failed;
1816    }
1817
1818    capabilityHeap = new QCamera3HeapMemory();
1819    if (capabilityHeap == NULL) {
1820        ALOGE("%s: creation of capabilityHeap failed", __func__);
1821        goto heap_creation_failed;
1822    }
1823    /* Allocate memory for capability buffer */
1824    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1825    if(rc != OK) {
1826        ALOGE("%s: No memory for cappability", __func__);
1827        goto allocate_failed;
1828    }
1829
1830    /* Map memory for capability buffer */
1831    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1832    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1833                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1834                                capabilityHeap->getFd(0),
1835                                sizeof(cam_capability_t));
1836    if(rc < 0) {
1837        ALOGE("%s: failed to map capability buffer", __func__);
1838        goto map_failed;
1839    }
1840
1841    /* Query Capability */
1842    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1843    if(rc < 0) {
1844        ALOGE("%s: failed to query capability",__func__);
1845        goto query_failed;
1846    }
1847    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1848    if (!gCamCapability[cameraId]) {
1849        ALOGE("%s: out of memory", __func__);
1850        goto query_failed;
1851    }
1852    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1853                                        sizeof(cam_capability_t));
1854    rc = 0;
1855
1856query_failed:
1857    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1858                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1859map_failed:
1860    capabilityHeap->deallocate();
1861allocate_failed:
1862    delete capabilityHeap;
1863heap_creation_failed:
1864    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1865    cameraHandle = NULL;
1866open_failed:
1867    return rc;
1868}
1869
1870/*===========================================================================
1871 * FUNCTION   : initParameters
1872 *
1873 * DESCRIPTION: initialize camera parameters
1874 *
1875 * PARAMETERS :
1876 *
1877 * RETURN     : int32_t type of status
1878 *              NO_ERROR  -- success
1879 *              none-zero failure code
1880 *==========================================================================*/
1881int QCamera3HardwareInterface::initParameters()
1882{
1883    int rc = 0;
1884
1885    //Allocate Set Param Buffer
1886    mParamHeap = new QCamera3HeapMemory();
1887    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1888    if(rc != OK) {
1889        rc = NO_MEMORY;
1890        ALOGE("Failed to allocate SETPARM Heap memory");
1891        delete mParamHeap;
1892        mParamHeap = NULL;
1893        return rc;
1894    }
1895
1896    //Map memory for parameters buffer
1897    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1898            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1899            mParamHeap->getFd(0),
1900            sizeof(parm_buffer_t));
1901    if(rc < 0) {
1902        ALOGE("%s:failed to map SETPARM buffer",__func__);
1903        rc = FAILED_TRANSACTION;
1904        mParamHeap->deallocate();
1905        delete mParamHeap;
1906        mParamHeap = NULL;
1907        return rc;
1908    }
1909
1910    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1911    return rc;
1912}
1913
1914/*===========================================================================
1915 * FUNCTION   : deinitParameters
1916 *
1917 * DESCRIPTION: de-initialize camera parameters
1918 *
1919 * PARAMETERS :
1920 *
1921 * RETURN     : NONE
1922 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tear down what initParameters() set up, in reverse order:
    // ask the backend to drop its mapping of the SETPARM buffer first,
    // while the underlying memory is still alive.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // Now it is safe to free the heap backing the buffer.
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage; clear the dangling alias.
    mParameters = NULL;
}
1934
1935/*===========================================================================
1936 * FUNCTION   : calcMaxJpegSize
1937 *
1938 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1939 *
1940 * PARAMETERS :
1941 *
1942 * RETURN     : max_jpeg_size
1943 *==========================================================================*/
1944int QCamera3HardwareInterface::calcMaxJpegSize()
1945{
1946    int32_t max_jpeg_size = 0;
1947    int temp_width, temp_height;
1948    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1949        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1950        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1951        if (temp_width * temp_height > max_jpeg_size ) {
1952            max_jpeg_size = temp_width * temp_height;
1953        }
1954    }
1955    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1956    return max_jpeg_size;
1957}
1958
1959/*===========================================================================
1960 * FUNCTION   : initStaticMetadata
1961 *
1962 * DESCRIPTION: initialize the static metadata
1963 *
1964 * PARAMETERS :
1965 *   @cameraId  : camera Id
1966 *
1967 * RETURN     : int32_t type of status
1968 *              0  -- success
1969 *              non-zero failure code
1970 *==========================================================================*/
1971int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1972{
1973    int rc = 0;
1974    CameraMetadata staticInfo;
1975
1976    /* android.info: hardware level */
1977    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
1978    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1979        &supportedHardwareLevel, 1);
1980
1981    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1982    /*HAL 3 only*/
1983    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1984                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1985
1986    /*hard coded for now but this should come from sensor*/
1987    float min_focus_distance;
1988    if(facingBack){
1989        min_focus_distance = 10;
1990    } else {
1991        min_focus_distance = 0;
1992    }
1993    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1994                    &min_focus_distance, 1);
1995
1996    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1997                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1998
1999    /*should be using focal lengths but sensor doesn't provide that info now*/
2000    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2001                      &gCamCapability[cameraId]->focal_length,
2002                      1);
2003
2004    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2005                      gCamCapability[cameraId]->apertures,
2006                      gCamCapability[cameraId]->apertures_count);
2007
2008    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2009                gCamCapability[cameraId]->filter_densities,
2010                gCamCapability[cameraId]->filter_densities_count);
2011
2012
2013    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2014                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2015                      gCamCapability[cameraId]->optical_stab_modes_count);
2016
2017    staticInfo.update(ANDROID_LENS_POSITION,
2018                      gCamCapability[cameraId]->lens_position,
2019                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2020
2021    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2022                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2023    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2024                      lens_shading_map_size,
2025                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2026
2027    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2028                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2029    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2030            geo_correction_map_size,
2031            sizeof(geo_correction_map_size)/sizeof(int32_t));
2032
2033    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2034                       gCamCapability[cameraId]->geo_correction_map,
2035                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2036
2037    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2038            gCamCapability[cameraId]->sensor_physical_size, 2);
2039
2040    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2041            gCamCapability[cameraId]->exposure_time_range, 2);
2042
2043    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2044            &gCamCapability[cameraId]->max_frame_duration, 1);
2045
2046
2047    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2048                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2049
2050    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2051                                               gCamCapability[cameraId]->pixel_array_size.height};
2052    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2053                      pixel_array_size, 2);
2054
2055    int32_t active_array_size[] = {0, 0,
2056                                                gCamCapability[cameraId]->active_array_size.width,
2057                                                gCamCapability[cameraId]->active_array_size.height};
2058    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2059                      active_array_size, 4);
2060
2061    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2062            &gCamCapability[cameraId]->white_level, 1);
2063
2064    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2065            gCamCapability[cameraId]->black_level_pattern, 4);
2066
2067    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2068                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2069
2070    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2071                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2072
2073    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2074                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2075
2076    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2077                      &gCamCapability[cameraId]->histogram_size, 1);
2078
2079    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2080            &gCamCapability[cameraId]->max_histogram_count, 1);
2081
2082    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2083                                                gCamCapability[cameraId]->sharpness_map_size.height};
2084
2085    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2086            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2087
2088    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2089            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2090
2091
2092    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2093                      &gCamCapability[cameraId]->raw_min_duration,
2094                       1);
2095
2096    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2097                                                HAL_PIXEL_FORMAT_BLOB};
2098    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2099    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2100                      scalar_formats,
2101                      scalar_formats_count);
2102
2103    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2104    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2105              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2106              available_processed_sizes);
2107    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2108                available_processed_sizes,
2109                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2110
2111    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2112                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2113                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2114
2115    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2116    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2117                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2118                 available_fps_ranges);
2119    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2120            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2121
2122    camera_metadata_rational exposureCompensationStep = {
2123            gCamCapability[cameraId]->exp_compensation_step.numerator,
2124            gCamCapability[cameraId]->exp_compensation_step.denominator};
2125    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2126                      &exposureCompensationStep, 1);
2127
2128    /*TO DO*/
2129    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2130    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2131                      availableVstabModes, sizeof(availableVstabModes));
2132
2133    /*HAL 1 and HAL 3 common*/
2134    float maxZoom = 4;
2135    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2136            &maxZoom, 1);
2137
2138    int32_t max3aRegions = 1;
2139    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2140            &max3aRegions, 1);
2141
2142    uint8_t availableFaceDetectModes[] = {
2143            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2144            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2145    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2146                      availableFaceDetectModes,
2147                      sizeof(availableFaceDetectModes));
2148
2149    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2150                                       gCamCapability[cameraId]->raw_dim.height};
2151    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2152                      raw_size,
2153                      sizeof(raw_size)/sizeof(uint32_t));
2154
2155    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2156                                                        gCamCapability[cameraId]->exposure_compensation_max};
2157    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2158            exposureCompensationRange,
2159            sizeof(exposureCompensationRange)/sizeof(int32_t));
2160
2161    uint8_t lensFacing = (facingBack) ?
2162            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2163    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2164
2165    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2166                available_processed_sizes,
2167                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2168
2169    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2170                      available_thumbnail_sizes,
2171                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2172
2173    int32_t max_jpeg_size = 0;
2174    int temp_width, temp_height;
2175    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2176        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2177        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2178        if (temp_width * temp_height > max_jpeg_size ) {
2179            max_jpeg_size = temp_width * temp_height;
2180        }
2181    }
2182    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2183    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2184                      &max_jpeg_size, 1);
2185
2186    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2187    int32_t size = 0;
2188    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2189        int val = lookupFwkName(EFFECT_MODES_MAP,
2190                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2191                                   gCamCapability[cameraId]->supported_effects[i]);
2192        if (val != NAME_NOT_FOUND) {
2193            avail_effects[size] = (uint8_t)val;
2194            size++;
2195        }
2196    }
2197    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2198                      avail_effects,
2199                      size);
2200
2201    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2202    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2203    int32_t supported_scene_modes_cnt = 0;
2204    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2205        int val = lookupFwkName(SCENE_MODES_MAP,
2206                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2207                                gCamCapability[cameraId]->supported_scene_modes[i]);
2208        if (val != NAME_NOT_FOUND) {
2209            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2210            supported_indexes[supported_scene_modes_cnt] = i;
2211            supported_scene_modes_cnt++;
2212        }
2213    }
2214
2215    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2216                      avail_scene_modes,
2217                      supported_scene_modes_cnt);
2218
2219    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2220    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2221                      supported_scene_modes_cnt,
2222                      scene_mode_overrides,
2223                      supported_indexes,
2224                      cameraId);
2225    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2226                      scene_mode_overrides,
2227                      supported_scene_modes_cnt*3);
2228
2229    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2230    size = 0;
2231    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2232        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2233                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2234                                 gCamCapability[cameraId]->supported_antibandings[i]);
2235        if (val != NAME_NOT_FOUND) {
2236            avail_antibanding_modes[size] = (uint8_t)val;
2237            size++;
2238        }
2239
2240    }
2241    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2242                      avail_antibanding_modes,
2243                      size);
2244
2245    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2246    size = 0;
2247    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2248        int val = lookupFwkName(FOCUS_MODES_MAP,
2249                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2250                                gCamCapability[cameraId]->supported_focus_modes[i]);
2251        if (val != NAME_NOT_FOUND) {
2252            avail_af_modes[size] = (uint8_t)val;
2253            size++;
2254        }
2255    }
2256    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2257                      avail_af_modes,
2258                      size);
2259
2260    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2261    size = 0;
2262    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2263        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2264                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2265                                    gCamCapability[cameraId]->supported_white_balances[i]);
2266        if (val != NAME_NOT_FOUND) {
2267            avail_awb_modes[size] = (uint8_t)val;
2268            size++;
2269        }
2270    }
2271    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2272                      avail_awb_modes,
2273                      size);
2274
2275    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2276    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2277      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2278
2279    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2280            available_flash_levels,
2281            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2282
2283
2284    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2285    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2286            &flashAvailable, 1);
2287
2288    uint8_t avail_ae_modes[5];
2289    size = 0;
2290    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2291        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2292        size++;
2293    }
2294    if (flashAvailable) {
2295        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2296        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2297        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2298    }
2299    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2300                      avail_ae_modes,
2301                      size);
2302
2303    int32_t sensitivity_range[2];
2304    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2305    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2306    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2307                      sensitivity_range,
2308                      sizeof(sensitivity_range) / sizeof(int32_t));
2309
2310    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2311                      &gCamCapability[cameraId]->max_analog_sensitivity,
2312                      1);
2313
2314    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2315                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2316                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2317
2318    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2319    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2320                      &sensor_orientation,
2321                      1);
2322
2323    int32_t max_output_streams[3] = {1, 3, 1};
2324    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2325                      max_output_streams,
2326                      3);
2327
2328    gStaticMetadata[cameraId] = staticInfo.release();
2329    return rc;
2330}
2331
2332/*===========================================================================
2333 * FUNCTION   : makeTable
2334 *
2335 * DESCRIPTION: make a table of sizes
2336 *
2337 * PARAMETERS :
2338 *
2339 *
2340 *==========================================================================*/
2341void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2342                                          int32_t* sizeTable)
2343{
2344    int j = 0;
2345    for (int i = 0; i < size; i++) {
2346        sizeTable[j] = dimTable[i].width;
2347        sizeTable[j+1] = dimTable[i].height;
2348        j+=2;
2349    }
2350}
2351
2352/*===========================================================================
2353 * FUNCTION   : makeFPSTable
2354 *
2355 * DESCRIPTION: make a table of fps ranges
2356 *
2357 * PARAMETERS :
2358 *
2359 *==========================================================================*/
2360void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2361                                          int32_t* fpsRangesTable)
2362{
2363    int j = 0;
2364    for (int i = 0; i < size; i++) {
2365        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2366        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2367        j+=2;
2368    }
2369}
2370
2371/*===========================================================================
2372 * FUNCTION   : makeOverridesList
2373 *
2374 * DESCRIPTION: make a list of scene mode overrides
2375 *
2376 * PARAMETERS :
2377 *
2378 *
2379 *==========================================================================*/
2380void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2381                                                  uint8_t size, uint8_t* overridesList,
2382                                                  uint8_t* supported_indexes,
2383                                                  int camera_id)
2384{
2385    /*daemon will give a list of overrides for all scene modes.
2386      However we should send the fwk only the overrides for the scene modes
2387      supported by the framework*/
2388    int j = 0, index = 0, supt = 0;
2389    uint8_t focus_override;
2390    for (int i = 0; i < size; i++) {
2391        supt = 0;
2392        index = supported_indexes[i];
2393        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2394        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2395                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2396                                                    overridesTable[index].awb_mode);
2397        focus_override = (uint8_t)overridesTable[index].af_mode;
2398        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2399           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2400              supt = 1;
2401              break;
2402           }
2403        }
2404        if (supt) {
2405           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2406                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2407                                              focus_override);
2408        } else {
2409           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2410        }
2411        j+=3;
2412    }
2413}
2414
2415/*===========================================================================
2416 * FUNCTION   : getPreviewHalPixelFormat
2417 *
2418 * DESCRIPTION: convert the format to type recognized by framework
2419 *
2420 * PARAMETERS : format : the format from backend
2421 *
2422 ** RETURN    : format recognized by framework
2423 *
2424 *==========================================================================*/
2425int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2426{
2427    int32_t halPixelFormat;
2428
2429    switch (format) {
2430    case CAM_FORMAT_YUV_420_NV12:
2431        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2432        break;
2433    case CAM_FORMAT_YUV_420_NV21:
2434        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2435        break;
2436    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2437        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2438        break;
2439    case CAM_FORMAT_YUV_420_YV12:
2440        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2441        break;
2442    case CAM_FORMAT_YUV_422_NV16:
2443    case CAM_FORMAT_YUV_422_NV61:
2444    default:
2445        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2446        break;
2447    }
2448    return halPixelFormat;
2449}
2450
2451/*===========================================================================
2452 * FUNCTION   : getSensorSensitivity
2453 *
2454 * DESCRIPTION: convert iso_mode to an integer value
2455 *
2456 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2457 *
2458 ** RETURN    : sensitivity supported by sensor
2459 *
2460 *==========================================================================*/
2461int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2462{
2463    int32_t sensitivity;
2464
2465    switch (iso_mode) {
2466    case CAM_ISO_MODE_100:
2467        sensitivity = 100;
2468        break;
2469    case CAM_ISO_MODE_200:
2470        sensitivity = 200;
2471        break;
2472    case CAM_ISO_MODE_400:
2473        sensitivity = 400;
2474        break;
2475    case CAM_ISO_MODE_800:
2476        sensitivity = 800;
2477        break;
2478    case CAM_ISO_MODE_1600:
2479        sensitivity = 1600;
2480        break;
2481    default:
2482        sensitivity = -1;
2483        break;
2484    }
2485    return sensitivity;
2486}
2487
2488
2489/*===========================================================================
2490 * FUNCTION   : AddSetParmEntryToBatch
2491 *
2492 * DESCRIPTION: add set parameter entry into batch
2493 *
2494 * PARAMETERS :
2495 *   @p_table     : ptr to parameter buffer
2496 *   @paramType   : parameter type
2497 *   @paramLength : length of parameter value
2498 *   @paramValue  : ptr to parameter value
2499 *
2500 * RETURN     : int32_t type of status
2501 *              NO_ERROR  -- success
2502 *              none-zero failure code
2503 *==========================================================================*/
2504int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2505                                                          cam_intf_parm_type_t paramType,
2506                                                          uint32_t paramLength,
2507                                                          void *paramValue)
2508{
2509    int position = paramType;
2510    int current, next;
2511
2512    /*************************************************************************
2513    *                 Code to take care of linking next flags                *
2514    *************************************************************************/
2515    current = GET_FIRST_PARAM_ID(p_table);
2516    if (position == current){
2517        //DO NOTHING
2518    } else if (position < current){
2519        SET_NEXT_PARAM_ID(position, p_table, current);
2520        SET_FIRST_PARAM_ID(p_table, position);
2521    } else {
2522        /* Search for the position in the linked list where we need to slot in*/
2523        while (position > GET_NEXT_PARAM_ID(current, p_table))
2524            current = GET_NEXT_PARAM_ID(current, p_table);
2525
2526        /*If node already exists no need to alter linking*/
2527        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2528            next = GET_NEXT_PARAM_ID(current, p_table);
2529            SET_NEXT_PARAM_ID(current, p_table, position);
2530            SET_NEXT_PARAM_ID(position, p_table, next);
2531        }
2532    }
2533
2534    /*************************************************************************
2535    *                   Copy contents into entry                             *
2536    *************************************************************************/
2537
2538    if (paramLength > sizeof(parm_type_t)) {
2539        ALOGE("%s:Size of input larger than max entry size",__func__);
2540        return BAD_VALUE;
2541    }
2542    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2543    return NO_ERROR;
2544}
2545
2546/*===========================================================================
2547 * FUNCTION   : lookupFwkName
2548 *
2549 * DESCRIPTION: In case the enum is not same in fwk and backend
2550 *              make sure the parameter is correctly propogated
2551 *
2552 * PARAMETERS  :
2553 *   @arr      : map between the two enums
2554 *   @len      : len of the map
2555 *   @hal_name : name of the hal_parm to map
2556 *
2557 * RETURN     : int type of status
2558 *              fwk_name  -- success
2559 *              none-zero failure code
2560 *==========================================================================*/
2561int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2562                                             int len, int hal_name)
2563{
2564
2565    for (int i = 0; i < len; i++) {
2566        if (arr[i].hal_name == hal_name)
2567            return arr[i].fwk_name;
2568    }
2569
2570    /* Not able to find matching framework type is not necessarily
2571     * an error case. This happens when mm-camera supports more attributes
2572     * than the frameworks do */
2573    ALOGD("%s: Cannot find matching framework type", __func__);
2574    return NAME_NOT_FOUND;
2575}
2576
2577/*===========================================================================
2578 * FUNCTION   : lookupHalName
2579 *
2580 * DESCRIPTION: In case the enum is not same in fwk and backend
2581 *              make sure the parameter is correctly propogated
2582 *
2583 * PARAMETERS  :
2584 *   @arr      : map between the two enums
2585 *   @len      : len of the map
2586 *   @fwk_name : name of the hal_parm to map
2587 *
2588 * RETURN     : int32_t type of status
2589 *              hal_name  -- success
2590 *              none-zero failure code
2591 *==========================================================================*/
2592int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2593                                             int len, int fwk_name)
2594{
2595    for (int i = 0; i < len; i++) {
2596       if (arr[i].fwk_name == fwk_name)
2597           return arr[i].hal_name;
2598    }
2599    ALOGE("%s: Cannot find matching hal type", __func__);
2600    return NAME_NOT_FOUND;
2601}
2602
2603/*===========================================================================
2604 * FUNCTION   : getCapabilities
2605 *
2606 * DESCRIPTION: query camera capabilities
2607 *
2608 * PARAMETERS :
2609 *   @cameraId  : camera Id
2610 *   @info      : camera info struct to be filled in with camera capabilities
2611 *
2612 * RETURN     : int32_t type of status
2613 *              NO_ERROR  -- success
2614 *              none-zero failure code
2615 *==========================================================================*/
2616int QCamera3HardwareInterface::getCamInfo(int cameraId,
2617                                    struct camera_info *info)
2618{
2619    int rc = 0;
2620
2621    if (NULL == gCamCapability[cameraId]) {
2622        rc = initCapabilities(cameraId);
2623        if (rc < 0) {
2624            //pthread_mutex_unlock(&g_camlock);
2625            return rc;
2626        }
2627    }
2628
2629    if (NULL == gStaticMetadata[cameraId]) {
2630        rc = initStaticMetadata(cameraId);
2631        if (rc < 0) {
2632            return rc;
2633        }
2634    }
2635
2636    switch(gCamCapability[cameraId]->position) {
2637    case CAM_POSITION_BACK:
2638        info->facing = CAMERA_FACING_BACK;
2639        break;
2640
2641    case CAM_POSITION_FRONT:
2642        info->facing = CAMERA_FACING_FRONT;
2643        break;
2644
2645    default:
2646        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2647        rc = -1;
2648        break;
2649    }
2650
2651
2652    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2653    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2654    info->static_camera_characteristics = gStaticMetadata[cameraId];
2655
2656    return rc;
2657}
2658
2659/*===========================================================================
2660 * FUNCTION   : translateMetadata
2661 *
2662 * DESCRIPTION: translate the metadata into camera_metadata_t
2663 *
2664 * PARAMETERS : type of the request
2665 *
2666 *
2667 * RETURN     : success: camera_metadata_t*
2668 *              failure: NULL
2669 *
2670 *==========================================================================*/
2671camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2672{
2673    pthread_mutex_lock(&mMutex);
2674
2675    if (mDefaultMetadata[type] != NULL) {
2676        pthread_mutex_unlock(&mMutex);
2677        return mDefaultMetadata[type];
2678    }
2679    //first time we are handling this request
2680    //fill up the metadata structure using the wrapper class
2681    CameraMetadata settings;
2682    //translate from cam_capability_t to camera_metadata_tag_t
2683    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2684    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2685
2686    /*control*/
2687
2688    uint8_t controlIntent = 0;
2689    switch (type) {
2690      case CAMERA3_TEMPLATE_PREVIEW:
2691        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2692        break;
2693      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2694        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2695        break;
2696      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2697        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2698        break;
2699      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2700        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2701        break;
2702      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2703        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2704        break;
2705      default:
2706        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2707        break;
2708    }
2709    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2710
2711    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2712            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2713
2714    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2715    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2716
2717    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2718    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2719
2720    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2721    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2722
2723    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2724    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2725
2726    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2727    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2728
2729    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2730    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2731
2732    static uint8_t focusMode;
2733    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2734        ALOGE("%s: Setting focus mode to auto", __func__);
2735        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2736    } else {
2737        ALOGE("%s: Setting focus mode to off", __func__);
2738        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2739    }
2740    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2741
2742    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2743    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2744
2745    /*flash*/
2746    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2747    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2748
2749    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2750    settings.update(ANDROID_FLASH_FIRING_POWER,
2751            &flashFiringLevel, 1);
2752
2753    /* lens */
2754    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2755    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2756
2757    if (gCamCapability[mCameraId]->filter_densities_count) {
2758        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2759        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2760                        gCamCapability[mCameraId]->filter_densities_count);
2761    }
2762
2763    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2764    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2765
2766    /* frame duration */
2767    int64_t default_frame_duration = NSEC_PER_33MSEC;
2768    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2769
2770    /* sensitivity */
2771    int32_t default_sensitivity = 100;
2772    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2773
2774    mDefaultMetadata[type] = settings.release();
2775
2776    pthread_mutex_unlock(&mMutex);
2777    return mDefaultMetadata[type];
2778}
2779
2780/*===========================================================================
2781 * FUNCTION   : setFrameParameters
2782 *
2783 * DESCRIPTION: set parameters per frame as requested in the metadata from
2784 *              framework
2785 *
2786 * PARAMETERS :
2787 *   @request   : request that needs to be serviced
2788 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2789 *
2790 * RETURN     : success: NO_ERROR
2791 *              failure:
2792 *==========================================================================*/
2793int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2794                    uint32_t streamTypeMask)
2795{
2796    /*translate from camera_metadata_t type to parm_type_t*/
2797    int rc = 0;
2798    if (request->settings == NULL && mFirstRequest) {
2799        /*settings cannot be null for the first request*/
2800        return BAD_VALUE;
2801    }
2802
2803    int32_t hal_version = CAM_HAL_V3;
2804
2805    memset(mParameters, 0, sizeof(parm_buffer_t));
2806    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2807    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2808                sizeof(hal_version), &hal_version);
2809    if (rc < 0) {
2810        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2811        return BAD_VALUE;
2812    }
2813
2814    /*we need to update the frame number in the parameters*/
2815    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2816                                sizeof(request->frame_number), &(request->frame_number));
2817    if (rc < 0) {
2818        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2819        return BAD_VALUE;
2820    }
2821
2822    /* Update stream id mask where buffers are requested */
2823    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2824                                sizeof(streamTypeMask), &streamTypeMask);
2825    if (rc < 0) {
2826        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2827        return BAD_VALUE;
2828    }
2829
2830    if(request->settings != NULL){
2831        rc = translateMetadataToParameters(request);
2832    }
2833    /*set the parameters to backend*/
2834    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2835    return rc;
2836}
2837
2838/*===========================================================================
2839 * FUNCTION   : translateMetadataToParameters
2840 *
2841 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2842 *
2843 *
2844 * PARAMETERS :
2845 *   @request  : request sent from framework
2846 *
2847 *
2848 * RETURN     : success: NO_ERROR
2849 *              failure:
2850 *==========================================================================*/
2851int QCamera3HardwareInterface::translateMetadataToParameters
2852                                  (const camera3_capture_request_t *request)
2853{
2854    int rc = 0;
2855    CameraMetadata frame_settings;
2856    frame_settings = request->settings;
2857
2858    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2859        int32_t antibandingMode =
2860            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2861        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2862                sizeof(antibandingMode), &antibandingMode);
2863    }
2864
2865    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2866        int32_t expCompensation = frame_settings.find(
2867            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2868        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2869            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2870        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2871            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2872        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2873          sizeof(expCompensation), &expCompensation);
2874    }
2875
2876    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2877        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2878        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2879                sizeof(aeLock), &aeLock);
2880    }
2881    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2882        cam_fps_range_t fps_range;
2883        fps_range.min_fps =
2884            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2885        fps_range.max_fps =
2886            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2887        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2888                sizeof(fps_range), &fps_range);
2889    }
2890
2891    float focalDistance = -1.0;
2892    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2893        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2894        rc = AddSetParmEntryToBatch(mParameters,
2895                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2896                sizeof(focalDistance), &focalDistance);
2897    }
2898
2899    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2900        uint8_t fwk_focusMode =
2901            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2902        uint8_t focusMode;
2903        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2904            focusMode = CAM_FOCUS_MODE_INFINITY;
2905        } else{
2906         focusMode = lookupHalName(FOCUS_MODES_MAP,
2907                                   sizeof(FOCUS_MODES_MAP),
2908                                   fwk_focusMode);
2909        }
2910        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2911                sizeof(focusMode), &focusMode);
2912    }
2913
2914    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2915        uint8_t awbLock =
2916            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2917        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2918                sizeof(awbLock), &awbLock);
2919    }
2920
2921    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2922        uint8_t fwk_whiteLevel =
2923            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2924        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2925                sizeof(WHITE_BALANCE_MODES_MAP),
2926                fwk_whiteLevel);
2927        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2928                sizeof(whiteLevel), &whiteLevel);
2929    }
2930
2931    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2932        uint8_t fwk_effectMode =
2933            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2934        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2935                sizeof(EFFECT_MODES_MAP),
2936                fwk_effectMode);
2937        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2938                sizeof(effectMode), &effectMode);
2939    }
2940
2941    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2942        uint8_t fwk_aeMode =
2943            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2944        uint8_t aeMode;
2945        int32_t redeye;
2946
2947        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2948            aeMode = CAM_AE_MODE_OFF;
2949        } else {
2950            aeMode = CAM_AE_MODE_ON;
2951        }
2952        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2953            redeye = 1;
2954        } else {
2955            redeye = 0;
2956        }
2957
2958        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2959                                          sizeof(AE_FLASH_MODE_MAP),
2960                                          fwk_aeMode);
2961        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2962                sizeof(aeMode), &aeMode);
2963        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2964                sizeof(flashMode), &flashMode);
2965        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2966                sizeof(redeye), &redeye);
2967    }
2968
2969    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2970        uint8_t colorCorrectMode =
2971            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2972        rc =
2973            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2974                    sizeof(colorCorrectMode), &colorCorrectMode);
2975    }
2976
2977    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
2978        cam_color_correct_gains_t colorCorrectGains;
2979        for (int i = 0; i < 4; i++) {
2980            colorCorrectGains.gains[i] =
2981                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
2982        }
2983        rc =
2984            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
2985                    sizeof(colorCorrectGains), &colorCorrectGains);
2986    }
2987
2988    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
2989        cam_color_correct_matrix_t colorCorrectTransform;
2990        cam_rational_type_t transform_elem;
2991        int num = 0;
2992        for (int i = 0; i < 3; i++) {
2993           for (int j = 0; j < 3; j++) {
2994              transform_elem.numerator =
2995                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
2996              transform_elem.denominator =
2997                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
2998              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
2999              num++;
3000           }
3001        }
3002        rc =
3003            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3004                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3005    }
3006
3007    cam_trigger_t aecTrigger;
3008    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3009    aecTrigger.trigger_id = -1;
3010    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3011        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3012        aecTrigger.trigger =
3013            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3014        aecTrigger.trigger_id =
3015            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3016    }
3017    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3018                                sizeof(aecTrigger), &aecTrigger);
3019
3020    /*af_trigger must come with a trigger id*/
3021    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3022        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3023        cam_trigger_t af_trigger;
3024        af_trigger.trigger =
3025            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3026        af_trigger.trigger_id =
3027            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3028        rc = AddSetParmEntryToBatch(mParameters,
3029                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3030    }
3031
3032    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3033        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3034        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3035                sizeof(metaMode), &metaMode);
3036        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3037           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3038           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3039                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3040                                             fwk_sceneMode);
3041           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3042                sizeof(sceneMode), &sceneMode);
3043        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3044           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3045           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3046                sizeof(sceneMode), &sceneMode);
3047        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3048           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3049           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3050                sizeof(sceneMode), &sceneMode);
3051        }
3052    }
3053
3054    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3055        int32_t demosaic =
3056            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3057        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3058                sizeof(demosaic), &demosaic);
3059    }
3060
3061    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3062        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3063        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3064                sizeof(edgeMode), &edgeMode);
3065    }
3066
3067    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3068        int32_t edgeStrength =
3069            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3070        rc = AddSetParmEntryToBatch(mParameters,
3071                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
3072    }
3073
3074    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3075        int32_t respectFlashMode = 1;
3076        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3077            uint8_t fwk_aeMode =
3078                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3079            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3080                respectFlashMode = 0;
3081                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3082                    __func__);
3083            }
3084        }
3085        if (respectFlashMode) {
3086            uint8_t flashMode =
3087                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3088            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3089                                          sizeof(FLASH_MODES_MAP),
3090                                          flashMode);
3091            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3092            // To check: CAM_INTF_META_FLASH_MODE usage
3093            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3094                          sizeof(flashMode), &flashMode);
3095        }
3096    }
3097
3098    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3099        uint8_t flashPower =
3100            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3101        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3102                sizeof(flashPower), &flashPower);
3103    }
3104
3105    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3106        int64_t flashFiringTime =
3107            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3108        rc = AddSetParmEntryToBatch(mParameters,
3109                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3110    }
3111
3112    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3113        uint8_t geometricMode =
3114            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3115        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3116                sizeof(geometricMode), &geometricMode);
3117    }
3118
3119    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3120        uint8_t geometricStrength =
3121            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3122        rc = AddSetParmEntryToBatch(mParameters,
3123                CAM_INTF_META_GEOMETRIC_STRENGTH,
3124                sizeof(geometricStrength), &geometricStrength);
3125    }
3126
3127    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3128        uint8_t hotPixelMode =
3129            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3130        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3131                sizeof(hotPixelMode), &hotPixelMode);
3132    }
3133
3134    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3135        float lensAperture =
3136            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3137        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3138                sizeof(lensAperture), &lensAperture);
3139    }
3140
3141    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3142        float filterDensity =
3143            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3144        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3145                sizeof(filterDensity), &filterDensity);
3146    }
3147
3148    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3149        float focalLength =
3150            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3151        rc = AddSetParmEntryToBatch(mParameters,
3152                CAM_INTF_META_LENS_FOCAL_LENGTH,
3153                sizeof(focalLength), &focalLength);
3154    }
3155
3156    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3157        uint8_t optStabMode =
3158            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3159        rc = AddSetParmEntryToBatch(mParameters,
3160                CAM_INTF_META_LENS_OPT_STAB_MODE,
3161                sizeof(optStabMode), &optStabMode);
3162    }
3163
3164    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3165        uint8_t noiseRedMode =
3166            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3167        rc = AddSetParmEntryToBatch(mParameters,
3168                CAM_INTF_META_NOISE_REDUCTION_MODE,
3169                sizeof(noiseRedMode), &noiseRedMode);
3170    }
3171
3172    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3173        uint8_t noiseRedStrength =
3174            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3175        rc = AddSetParmEntryToBatch(mParameters,
3176                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3177                sizeof(noiseRedStrength), &noiseRedStrength);
3178    }
3179
3180    cam_crop_region_t scalerCropRegion;
3181    bool scalerCropSet = false;
3182    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3183        scalerCropRegion.left =
3184            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3185        scalerCropRegion.top =
3186            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3187        scalerCropRegion.width =
3188            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3189        scalerCropRegion.height =
3190            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3191        rc = AddSetParmEntryToBatch(mParameters,
3192                CAM_INTF_META_SCALER_CROP_REGION,
3193                sizeof(scalerCropRegion), &scalerCropRegion);
3194        scalerCropSet = true;
3195    }
3196
3197    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3198        int64_t sensorExpTime =
3199            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3200        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3201        rc = AddSetParmEntryToBatch(mParameters,
3202                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3203                sizeof(sensorExpTime), &sensorExpTime);
3204    }
3205
3206    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3207        int64_t sensorFrameDuration =
3208            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3209        int64_t minFrameDuration = getMinFrameDuration(request);
3210        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3211        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3212            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3213        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3214        rc = AddSetParmEntryToBatch(mParameters,
3215                CAM_INTF_META_SENSOR_FRAME_DURATION,
3216                sizeof(sensorFrameDuration), &sensorFrameDuration);
3217    }
3218
3219    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3220        int32_t sensorSensitivity =
3221            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3222        if (sensorSensitivity <
3223                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3224            sensorSensitivity =
3225                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3226        if (sensorSensitivity >
3227                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3228            sensorSensitivity =
3229                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3230        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3231        rc = AddSetParmEntryToBatch(mParameters,
3232                CAM_INTF_META_SENSOR_SENSITIVITY,
3233                sizeof(sensorSensitivity), &sensorSensitivity);
3234    }
3235
3236    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3237        int32_t shadingMode =
3238            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3239        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3240                sizeof(shadingMode), &shadingMode);
3241    }
3242
3243    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3244        uint8_t shadingStrength =
3245            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3246        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3247                sizeof(shadingStrength), &shadingStrength);
3248    }
3249
3250    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3251        uint8_t fwk_facedetectMode =
3252            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3253        uint8_t facedetectMode =
3254            lookupHalName(FACEDETECT_MODES_MAP,
3255                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3256        rc = AddSetParmEntryToBatch(mParameters,
3257                CAM_INTF_META_STATS_FACEDETECT_MODE,
3258                sizeof(facedetectMode), &facedetectMode);
3259    }
3260
3261    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3262        uint8_t histogramMode =
3263            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3264        rc = AddSetParmEntryToBatch(mParameters,
3265                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3266                sizeof(histogramMode), &histogramMode);
3267    }
3268
3269    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3270        uint8_t sharpnessMapMode =
3271            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3272        rc = AddSetParmEntryToBatch(mParameters,
3273                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3274                sizeof(sharpnessMapMode), &sharpnessMapMode);
3275    }
3276
3277    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3278        uint8_t tonemapMode =
3279            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3280        rc = AddSetParmEntryToBatch(mParameters,
3281                CAM_INTF_META_TONEMAP_MODE,
3282                sizeof(tonemapMode), &tonemapMode);
3283    }
3284    int point = 0;
3285    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) {
3286        cam_tonemap_curve_t tonemapCurveBlue;
3287        tonemapCurveBlue.tonemap_points_cnt =
3288           gCamCapability[mCameraId]->max_tone_map_curve_points;
3289        for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) {
3290            for (int j = 0; j < 2; j++) {
3291               tonemapCurveBlue.tonemap_points[i][j] =
3292                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3293               point++;
3294            }
3295        }
3296        rc = AddSetParmEntryToBatch(mParameters,
3297                CAM_INTF_META_TONEMAP_CURVE_BLUE,
3298                sizeof(tonemapCurveBlue), &tonemapCurveBlue);
3299    }
3300    point = 0;
3301    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) {
3302        cam_tonemap_curve_t tonemapCurveGreen;
3303        tonemapCurveGreen.tonemap_points_cnt =
3304           gCamCapability[mCameraId]->max_tone_map_curve_points;
3305        for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) {
3306            for (int j = 0; j < 2; j++) {
3307               tonemapCurveGreen.tonemap_points[i][j] =
3308                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3309               point++;
3310            }
3311        }
3312        rc = AddSetParmEntryToBatch(mParameters,
3313                CAM_INTF_META_TONEMAP_CURVE_GREEN,
3314                sizeof(tonemapCurveGreen), &tonemapCurveGreen);
3315    }
3316    point = 0;
3317    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3318        cam_tonemap_curve_t tonemapCurveRed;
3319        tonemapCurveRed.tonemap_points_cnt =
3320           gCamCapability[mCameraId]->max_tone_map_curve_points;
3321        for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) {
3322            for (int j = 0; j < 2; j++) {
3323               tonemapCurveRed.tonemap_points[i][j] =
3324                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3325               point++;
3326            }
3327        }
3328        rc = AddSetParmEntryToBatch(mParameters,
3329                CAM_INTF_META_TONEMAP_CURVE_RED,
3330                sizeof(tonemapCurveRed), &tonemapCurveRed);
3331    }
3332
3333    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3334        uint8_t captureIntent =
3335            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3336        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3337                sizeof(captureIntent), &captureIntent);
3338    }
3339
3340    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3341        uint8_t blackLevelLock =
3342            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3343        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3344                sizeof(blackLevelLock), &blackLevelLock);
3345    }
3346
3347    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3348        uint8_t lensShadingMapMode =
3349            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3350        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3351                sizeof(lensShadingMapMode), &lensShadingMapMode);
3352    }
3353
3354    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3355        cam_area_t roi;
3356        bool reset = true;
3357        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3358        if (scalerCropSet) {
3359            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3360        }
3361        if (reset) {
3362            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3363                    sizeof(roi), &roi);
3364        }
3365    }
3366
3367    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3368        cam_area_t roi;
3369        bool reset = true;
3370        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3371        if (scalerCropSet) {
3372            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3373        }
3374        if (reset) {
3375            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3376                    sizeof(roi), &roi);
3377        }
3378    }
3379
3380    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3381        cam_area_t roi;
3382        bool reset = true;
3383        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3384        if (scalerCropSet) {
3385            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3386        }
3387        if (reset) {
3388            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3389                    sizeof(roi), &roi);
3390        }
3391    }
3392    return rc;
3393}
3394
3395/*===========================================================================
3396 * FUNCTION   : getJpegSettings
3397 *
3398 * DESCRIPTION: save the jpeg settings in the HAL
3399 *
3400 *
3401 * PARAMETERS :
3402 *   @settings  : frame settings information from framework
3403 *
3404 *
3405 * RETURN     : success: NO_ERROR
3406 *              failure:
3407 *==========================================================================*/
3408int QCamera3HardwareInterface::getJpegSettings
3409                                  (const camera_metadata_t *settings)
3410{
3411    if (mJpegSettings) {
3412        if (mJpegSettings->gps_timestamp) {
3413            free(mJpegSettings->gps_timestamp);
3414            mJpegSettings->gps_timestamp = NULL;
3415        }
3416        if (mJpegSettings->gps_coordinates) {
3417            for (int i = 0; i < 3; i++) {
3418                free(mJpegSettings->gps_coordinates[i]);
3419                mJpegSettings->gps_coordinates[i] = NULL;
3420            }
3421        }
3422        free(mJpegSettings);
3423        mJpegSettings = NULL;
3424    }
3425    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3426    CameraMetadata jpeg_settings;
3427    jpeg_settings = settings;
3428
3429    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3430        mJpegSettings->jpeg_orientation =
3431            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3432    } else {
3433        mJpegSettings->jpeg_orientation = 0;
3434    }
3435    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3436        mJpegSettings->jpeg_quality =
3437            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3438    } else {
3439        mJpegSettings->jpeg_quality = 85;
3440    }
3441    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3442        mJpegSettings->thumbnail_size.width =
3443            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3444        mJpegSettings->thumbnail_size.height =
3445            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3446    } else {
3447        mJpegSettings->thumbnail_size.width = 0;
3448        mJpegSettings->thumbnail_size.height = 0;
3449    }
3450    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3451        for (int i = 0; i < 3; i++) {
3452            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3453            *(mJpegSettings->gps_coordinates[i]) =
3454                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3455        }
3456    } else{
3457       for (int i = 0; i < 3; i++) {
3458            mJpegSettings->gps_coordinates[i] = NULL;
3459        }
3460    }
3461
3462    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3463        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3464        *(mJpegSettings->gps_timestamp) =
3465            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3466    } else {
3467        mJpegSettings->gps_timestamp = NULL;
3468    }
3469
3470    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3471        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3472        for (int i = 0; i < len; i++) {
3473            mJpegSettings->gps_processing_method[i] =
3474                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3475        }
3476        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3477            mJpegSettings->gps_processing_method[len] = '\0';
3478        }
3479    } else {
3480        mJpegSettings->gps_processing_method[0] = '\0';
3481    }
3482
3483    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3484        mJpegSettings->sensor_sensitivity =
3485            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3486    } else {
3487        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3488    }
3489
3490    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3491
3492    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3493        mJpegSettings->lens_focal_length =
3494            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3495    }
3496    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3497        mJpegSettings->exposure_compensation =
3498            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3499    }
3500    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3501    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3502    mJpegSettings->is_jpeg_format = true;
3503    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3504    return 0;
3505}
3506
3507/*===========================================================================
3508 * FUNCTION   : captureResultCb
3509 *
3510 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3511 *
3512 * PARAMETERS :
3513 *   @frame  : frame information from mm-camera-interface
3514 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3515 *   @userdata: userdata
3516 *
3517 * RETURN     : NONE
3518 *==========================================================================*/
3519void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3520                camera3_stream_buffer_t *buffer,
3521                uint32_t frame_number, void *userdata)
3522{
3523    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3524    if (hw == NULL) {
3525        ALOGE("%s: Invalid hw %p", __func__, hw);
3526        return;
3527    }
3528
3529    hw->captureResultCb(metadata, buffer, frame_number);
3530    return;
3531}
3532
3533
3534/*===========================================================================
3535 * FUNCTION   : initialize
3536 *
3537 * DESCRIPTION: Pass framework callback pointers to HAL
3538 *
3539 * PARAMETERS :
3540 *
3541 *
3542 * RETURN     : Success : 0
3543 *              Failure: -ENODEV
3544 *==========================================================================*/
3545
3546int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3547                                  const camera3_callback_ops_t *callback_ops)
3548{
3549    ALOGV("%s: E", __func__);
3550    QCamera3HardwareInterface *hw =
3551        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3552    if (!hw) {
3553        ALOGE("%s: NULL camera device", __func__);
3554        return -ENODEV;
3555    }
3556
3557    int rc = hw->initialize(callback_ops);
3558    ALOGV("%s: X", __func__);
3559    return rc;
3560}
3561
3562/*===========================================================================
3563 * FUNCTION   : configure_streams
3564 *
3565 * DESCRIPTION:
3566 *
3567 * PARAMETERS :
3568 *
3569 *
3570 * RETURN     : Success: 0
3571 *              Failure: -EINVAL (if stream configuration is invalid)
3572 *                       -ENODEV (fatal error)
3573 *==========================================================================*/
3574
3575int QCamera3HardwareInterface::configure_streams(
3576        const struct camera3_device *device,
3577        camera3_stream_configuration_t *stream_list)
3578{
3579    ALOGV("%s: E", __func__);
3580    QCamera3HardwareInterface *hw =
3581        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3582    if (!hw) {
3583        ALOGE("%s: NULL camera device", __func__);
3584        return -ENODEV;
3585    }
3586    int rc = hw->configureStreams(stream_list);
3587    ALOGV("%s: X", __func__);
3588    return rc;
3589}
3590
3591/*===========================================================================
3592 * FUNCTION   : register_stream_buffers
3593 *
3594 * DESCRIPTION: Register stream buffers with the device
3595 *
3596 * PARAMETERS :
3597 *
3598 * RETURN     :
3599 *==========================================================================*/
3600int QCamera3HardwareInterface::register_stream_buffers(
3601        const struct camera3_device *device,
3602        const camera3_stream_buffer_set_t *buffer_set)
3603{
3604    ALOGV("%s: E", __func__);
3605    QCamera3HardwareInterface *hw =
3606        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3607    if (!hw) {
3608        ALOGE("%s: NULL camera device", __func__);
3609        return -ENODEV;
3610    }
3611    int rc = hw->registerStreamBuffers(buffer_set);
3612    ALOGV("%s: X", __func__);
3613    return rc;
3614}
3615
3616/*===========================================================================
3617 * FUNCTION   : construct_default_request_settings
3618 *
3619 * DESCRIPTION: Configure a settings buffer to meet the required use case
3620 *
3621 * PARAMETERS :
3622 *
3623 *
3624 * RETURN     : Success: Return valid metadata
3625 *              Failure: Return NULL
3626 *==========================================================================*/
3627const camera_metadata_t* QCamera3HardwareInterface::
3628    construct_default_request_settings(const struct camera3_device *device,
3629                                        int type)
3630{
3631
3632    ALOGV("%s: E", __func__);
3633    camera_metadata_t* fwk_metadata = NULL;
3634    QCamera3HardwareInterface *hw =
3635        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3636    if (!hw) {
3637        ALOGE("%s: NULL camera device", __func__);
3638        return NULL;
3639    }
3640
3641    fwk_metadata = hw->translateCapabilityToMetadata(type);
3642
3643    ALOGV("%s: X", __func__);
3644    return fwk_metadata;
3645}
3646
3647/*===========================================================================
3648 * FUNCTION   : process_capture_request
3649 *
3650 * DESCRIPTION:
3651 *
3652 * PARAMETERS :
3653 *
3654 *
3655 * RETURN     :
3656 *==========================================================================*/
3657int QCamera3HardwareInterface::process_capture_request(
3658                    const struct camera3_device *device,
3659                    camera3_capture_request_t *request)
3660{
3661    ALOGV("%s: E", __func__);
3662    QCamera3HardwareInterface *hw =
3663        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3664    if (!hw) {
3665        ALOGE("%s: NULL camera device", __func__);
3666        return -EINVAL;
3667    }
3668
3669    int rc = hw->processCaptureRequest(request);
3670    ALOGV("%s: X", __func__);
3671    return rc;
3672}
3673
3674/*===========================================================================
3675 * FUNCTION   : get_metadata_vendor_tag_ops
3676 *
3677 * DESCRIPTION:
3678 *
3679 * PARAMETERS :
3680 *
3681 *
3682 * RETURN     :
3683 *==========================================================================*/
3684
3685void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3686                const struct camera3_device *device,
3687                vendor_tag_query_ops_t* ops)
3688{
3689    ALOGV("%s: E", __func__);
3690    QCamera3HardwareInterface *hw =
3691        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3692    if (!hw) {
3693        ALOGE("%s: NULL camera device", __func__);
3694        return;
3695    }
3696
3697    hw->getMetadataVendorTagOps(ops);
3698    ALOGV("%s: X", __func__);
3699    return;
3700}
3701
3702/*===========================================================================
3703 * FUNCTION   : dump
3704 *
3705 * DESCRIPTION:
3706 *
3707 * PARAMETERS :
3708 *
3709 *
3710 * RETURN     :
3711 *==========================================================================*/
3712
3713void QCamera3HardwareInterface::dump(
3714                const struct camera3_device *device, int fd)
3715{
3716    ALOGV("%s: E", __func__);
3717    QCamera3HardwareInterface *hw =
3718        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3719    if (!hw) {
3720        ALOGE("%s: NULL camera device", __func__);
3721        return;
3722    }
3723
3724    hw->dump(fd);
3725    ALOGV("%s: X", __func__);
3726    return;
3727}
3728
3729/*===========================================================================
3730 * FUNCTION   : close_camera_device
3731 *
3732 * DESCRIPTION:
3733 *
3734 * PARAMETERS :
3735 *
3736 *
3737 * RETURN     :
3738 *==========================================================================*/
3739int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3740{
3741    ALOGV("%s: E", __func__);
3742    int ret = NO_ERROR;
3743    QCamera3HardwareInterface *hw =
3744        reinterpret_cast<QCamera3HardwareInterface *>(
3745            reinterpret_cast<camera3_device_t *>(device)->priv);
3746    if (!hw) {
3747        ALOGE("NULL camera device");
3748        return BAD_VALUE;
3749    }
3750    delete hw;
3751
3752    pthread_mutex_lock(&mCameraSessionLock);
3753    mCameraSessionActive = 0;
3754    pthread_mutex_unlock(&mCameraSessionLock);
3755    ALOGV("%s: X", __func__);
3756    return ret;
3757}
3758
3759/*===========================================================================
3760 * FUNCTION   : getWaveletDenoiseProcessPlate
3761 *
3762 * DESCRIPTION: query wavelet denoise process plate
3763 *
3764 * PARAMETERS : None
3765 *
3766 * RETURN     : WNR prcocess plate vlaue
3767 *==========================================================================*/
3768cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3769{
3770    char prop[PROPERTY_VALUE_MAX];
3771    memset(prop, 0, sizeof(prop));
3772    property_get("persist.denoise.process.plates", prop, "0");
3773    int processPlate = atoi(prop);
3774    switch(processPlate) {
3775    case 0:
3776        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3777    case 1:
3778        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3779    case 2:
3780        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3781    case 3:
3782        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3783    default:
3784        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3785    }
3786}
3787
3788/*===========================================================================
3789 * FUNCTION   : needRotationReprocess
3790 *
3791 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3792 *
3793 * PARAMETERS : none
3794 *
3795 * RETURN     : true: needed
3796 *              false: no need
3797 *==========================================================================*/
3798bool QCamera3HardwareInterface::needRotationReprocess()
3799{
3800
3801    if (!mJpegSettings->is_jpeg_format) {
3802        // RAW image, no need to reprocess
3803        return false;
3804    }
3805
3806    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3807        mJpegSettings->jpeg_orientation > 0) {
3808        // current rotation is not zero, and pp has the capability to process rotation
3809        ALOGD("%s: need do reprocess for rotation", __func__);
3810        return true;
3811    }
3812
3813    return false;
3814}
3815
3816/*===========================================================================
3817 * FUNCTION   : needReprocess
3818 *
3819 * DESCRIPTION: if reprocess in needed
3820 *
3821 * PARAMETERS : none
3822 *
3823 * RETURN     : true: needed
3824 *              false: no need
3825 *==========================================================================*/
3826bool QCamera3HardwareInterface::needReprocess()
3827{
3828    if (!mJpegSettings->is_jpeg_format) {
3829        // RAW image, no need to reprocess
3830        return false;
3831    }
3832
3833    if ((mJpegSettings->min_required_pp_mask > 0) ||
3834         isWNREnabled()) {
3835        // TODO: add for ZSL HDR later
3836        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3837        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3838        return true;
3839    }
3840    return needRotationReprocess();
3841}
3842
3843/*===========================================================================
3844 * FUNCTION   : addOnlineReprocChannel
3845 *
3846 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
3847 *              coming from input channel
3848 *
3849 * PARAMETERS :
3850 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3851 *
3852 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3853 *==========================================================================*/
3854QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3855              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3856{
3857    int32_t rc = NO_ERROR;
3858    QCamera3ReprocessChannel *pChannel = NULL;
3859    if (pInputChannel == NULL) {
3860        ALOGE("%s: input channel obj is NULL", __func__);
3861        return NULL;
3862    }
3863
3864    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3865            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
3866    if (NULL == pChannel) {
3867        ALOGE("%s: no mem for reprocess channel", __func__);
3868        return NULL;
3869    }
3870
3871    // Capture channel, only need snapshot and postview streams start together
3872    mm_camera_channel_attr_t attr;
3873    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
3874    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
3875    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
3876    rc = pChannel->initialize();
3877    if (rc != NO_ERROR) {
3878        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
3879        delete pChannel;
3880        return NULL;
3881    }
3882
3883    // pp feature config
3884    cam_pp_feature_config_t pp_config;
3885    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
3886    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
3887        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
3888        pp_config.sharpness = 10;
3889    }
3890
3891    if (isWNREnabled()) {
3892        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
3893        pp_config.denoise2d.denoise_enable = 1;
3894        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
3895    }
3896    if (needRotationReprocess()) {
3897        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
3898        int rotation = mJpegSettings->jpeg_orientation;
3899        if (rotation == 0) {
3900            pp_config.rotation = ROTATE_0;
3901        } else if (rotation == 90) {
3902            pp_config.rotation = ROTATE_90;
3903        } else if (rotation == 180) {
3904            pp_config.rotation = ROTATE_180;
3905        } else if (rotation == 270) {
3906            pp_config.rotation = ROTATE_270;
3907        }
3908    }
3909
3910   rc = pChannel->addReprocStreamsFromSource(pp_config,
3911                                             pInputChannel,
3912                                             mMetadataChannel);
3913
3914    if (rc != NO_ERROR) {
3915        delete pChannel;
3916        return NULL;
3917    }
3918    return pChannel;
3919}
3920
3921int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
3922{
3923    return gCamCapability[mCameraId]->min_num_pp_bufs;
3924}
3925
3926bool QCamera3HardwareInterface::isWNREnabled() {
3927    return gCamCapability[mCameraId]->isWnrSupported;
3928}
3929
3930}; //end namespace qcamera
3931