QCamera3HWI.cpp revision 2c0ec473291d45f54a48e48d2f7471630119f08d
/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"

#include <cutils/properties.h>
#include <hardware/camera3.h>
#include <camera/CameraMetadata.h>
#include <stdlib.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <ui/Fence.h>
#include <gralloc_priv.h>
#include "QCamera3HWI.h"
#include "QCamera3Mem.h"
#include "QCamera3Channel.h"
#include "QCamera3PostProc.h"

using namespace android;

namespace qcamera {
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

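/* Supported JPEG thumbnail sizes, flattened as (width, height) pairs; the
 * trailing (0, 0) pair advertises that thumbnail generation can be skipped. */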
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};

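/* Dispatch table wiring the camera3_device_ops entry points to the static
 * wrappers of QCamera3HardwareInterface. */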
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};


/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      m_pPowerModule(NULL)
{
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
            channel->stop();
    }
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
            delete channel;
        free (*it);
    }

    mPictureChannel = NULL;

    if (mJpegSettings != NULL) {
        free(mJpegSettings);
        mJpegSettings = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        mMetadataChannel->stop();
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
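    /* Only one camera session may be active at a time; mCameraSessionLock
     * serializes access to the global mCameraSessionActive flag. */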
    int rc = 0;
    pthread_mutex_lock(&mCameraSessionLock);
    if (mCameraSessionActive) {
        ALOGE("%s: multiple simultaneous camera instances not supported", __func__);
        pthread_mutex_unlock(&mCameraSessionLock);
        return INVALID_OPERATION;
    }

    if (mCameraOpened) {
        *hw_device = NULL;
        pthread_mutex_unlock(&mCameraSessionLock);
        return PERMISSION_DENIED;
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
        mCameraSessionActive = 1;
    } else
        *hw_device = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == 0) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=1");
            }
        }
    }
#endif
    pthread_mutex_unlock(&mCameraSessionLock);
    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }
    mCameraHandle = camera_open(mCameraId);
    if (!mCameraHandle) {
        ALOGE("camera_open failed.");
        return UNKNOWN_ERROR;
    }

    mCameraOpened = true;

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    int rc = NO_ERROR;

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;
    mCameraOpened = false;

#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == NO_ERROR) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=0");
            }
        }
    }
#endif

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback functions to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParameters failed %d", __func__, rc);
        goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    return 0;

err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
 *              and output streams.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    pthread_mutex_lock(&mMutex);
    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    /* first invalidate all the streams in mStreamInfo;
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }

    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                newStream->width, newStream->height);
        //if the stream is in mStreamInfo validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

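    /* Every stream in streamList is now tracked in mStreamInfo as either
     * VALID (new) or RECONFIGURE (carried over); entries still marked INVALID
     * were dropped by the framework and are torn down below. */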
    /*clean up invalid streams*/
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    //mMetadataChannel->stop();

    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        if (newStream->priv == NULL) {
            //New stream, construct channel
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework won't*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

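        /* Track the number of buffers of this stream currently held by the
         * HAL; captureResultCb() compares this count against max_buffers
         * before unblocking processCaptureRequest(). */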
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
    mPendingRequestsList.clear();

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mFirstRequest = true;

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateCaptureRequest
 *
 * DESCRIPTION: validate a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateCaptureRequest(
                    camera3_capture_request_t *request)
{
    ssize_t idx = 0;
    const camera3_stream_buffer_t *b;
    CameraMetadata meta;

    /* Sanity check the request */
    if (request == NULL) {
        ALOGE("%s: NULL capture request", __func__);
        return BAD_VALUE;
    }

    uint32_t frameNumber = request->frame_number;
    if (request->input_buffer != NULL &&
            request->input_buffer->stream != mInputStream) {
        ALOGE("%s: Request %d: Input buffer not from input stream!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
        ALOGE("%s: Request %d: No output buffers provided!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->input_buffer != NULL) {
        b = request->input_buffer;
        QCamera3Channel *channel =
            static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
    }

    // Validate all buffers
    b = request->output_buffers;
    do {
        QCamera3Channel *channel =
                static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        idx++;
        b = request->output_buffers + idx;
    } while (idx < (ssize_t)request->num_output_buffers);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : registerStreamBuffers
 *
 * DESCRIPTION: Register buffers for a given stream with the HAL device.
 *
 * PARAMETERS :
 *   @buffer_set : buffer set of a single stream to be registered
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t *buffer_set)
{
    int rc = 0;

    pthread_mutex_lock(&mMutex);

    if (buffer_set == NULL) {
        ALOGE("%s: Invalid buffer_set parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->stream == NULL) {
        ALOGE("%s: Invalid stream parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->num_buffers < 1) {
        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->buffers == NULL) {
        ALOGE("%s: Invalid buffers parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    camera3_stream_t *stream = buffer_set->stream;
    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;

    //set the buffer_set in the mStreamInfo array
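    //The cached copy is what configureStreams() later uses to re-register
    //buffers for streams that come back marked RECONFIGURE.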
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->stream == stream) {
            uint32_t numBuffers = buffer_set->num_buffers;
            (*it)->buffer_set.stream = buffer_set->stream;
            (*it)->buffer_set.num_buffers = numBuffers;
            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
            if ((*it)->buffer_set.buffers == NULL) {
                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
                pthread_mutex_unlock(&mMutex);
                return -ENOMEM;
            }
            for (size_t j = 0; j < numBuffers; j++){
                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
            }
            (*it)->registered = 1;
        }
    }
    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
    if (rc < 0) {
        ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    pthread_mutex_unlock(&mMutex);
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : processCaptureRequest
 *
 * DESCRIPTION: process a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::processCaptureRequest(
                    camera3_capture_request_t *request)
{
    int rc = NO_ERROR;
    int32_t request_id;
    CameraMetadata meta;

    pthread_mutex_lock(&mMutex);

    rc = validateCaptureRequest(request);
    if (rc != NO_ERROR) {
        ALOGE("%s: incoming request is not valid", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    uint32_t frameNumber = request->frame_number;
    rc = setFrameParameters(request->frame_number, request->settings);
    if (rc < 0) {
        ALOGE("%s: fail to set frame parameters", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    meta = request->settings;
    if (meta.exists(ANDROID_REQUEST_ID)) {
        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
        mCurrentRequestId = request_id;
        ALOGV("%s: Received request with id: %d",__func__, request_id);
    } else if (mFirstRequest || mCurrentRequestId == -1){
        ALOGE("%s: Unable to find request id field, \
                & no previous id available", __func__);
        pthread_mutex_unlock(&mMutex);
        return NAME_NOT_FOUND;
    } else {
        ALOGV("%s: Re-using old request id", __func__);
        request_id = mCurrentRequestId;
    }

    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
                                    __func__, __LINE__,
                                    request->num_output_buffers,
                                    request->input_buffer,
                                    frameNumber);
    // Acquire all request buffers first
    int blob_request = 0;
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        sp<Fence> acquireFence = new Fence(output.acquire_fence);

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            //Call function to store local copy of jpeg data for encode params.
            blob_request = 1;
            rc = getJpegSettings(request->settings);
            if (rc < 0) {
                ALOGE("%s: failed to get jpeg parameters", __func__);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }

        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
        if (rc != OK) {
            ALOGE("%s: fence wait failed %d", __func__, rc);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
    }

    /* Update pending request list and pending buffers map */
    PendingRequestInfo pendingRequest;
    pendingRequest.frame_number = frameNumber;
    pendingRequest.num_buffers = request->num_output_buffers;
    pendingRequest.request_id = request_id;
    pendingRequest.blob_request = blob_request;

    for (size_t i = 0; i < request->num_output_buffers; i++) {
        RequestedBufferInfo requestedBuf;
        requestedBuf.stream = request->output_buffers[i].stream;
        requestedBuf.buffer = NULL;
        pendingRequest.buffers.push_back(requestedBuf);

        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
    }
    mPendingRequestsList.push_back(pendingRequest);

    // Notify metadata channel we receive a request
    mMetadataChannel->request(NULL, frameNumber);

    // Call request on other streams
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
        mm_camera_buf_def_t *pInputBuffer = NULL;

        if (channel == NULL) {
            ALOGE("%s: invalid channel pointer for stream", __func__);
            continue;
        }

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            QCamera3RegularChannel* inputChannel = NULL;
            if(request->input_buffer != NULL){

                //Try to get the internal format
                inputChannel = (QCamera3RegularChannel*)
                    request->input_buffer->stream->priv;
                if(inputChannel == NULL ){
                    ALOGE("%s: failed to get input channel handle", __func__);
                } else {
                    pInputBuffer =
                        inputChannel->getInternalFormatBuffer(
                                request->input_buffer->buffer);
                    ALOGD("%s: Input buffer dump",__func__);
                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
                    ALOGD("frame len:%d", pInputBuffer->frame_len);
                }
            }
            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
                            pInputBuffer,(QCamera3Channel*)inputChannel);
        } else {
            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
                __LINE__, output.buffer, frameNumber);
            rc = channel->request(output.buffer, frameNumber);
        }
        if (rc < 0)
            ALOGE("%s: request failed", __func__);
    }

    mFirstRequest = false;

    //Block on conditional variable
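    //captureResultCb() resets mPendingRequest and signals mRequestCond once
    //no stream is at its max_buffers limit and the backend reports no pending
    //requests, so this wait paces incoming requests to the pipeline depth.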
    mPendingRequest = 1;
    while (mPendingRequest == 1) {
        pthread_cond_wait(&mRequestCond, &mMutex);
    }

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : getMetadataVendorTagOps
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::getMetadataVendorTagOps(
                    vendor_tag_query_ops_t* /*ops*/)
{
    /* Enable locks when we eventually add Vendor Tags */
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::dump(int /*fd*/)
{
    /*Enable lock when we implement this function*/
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}


/*===========================================================================
 * FUNCTION   : captureResultCb
 *
 * DESCRIPTION: Callback handler for all capture results
 *              (streams, as well as metadata)
 *
 * PARAMETERS :
 *   @metadata : metadata information
 *   @buffer   : actual gralloc buffer to be returned to frameworks.
 *               NULL if metadata.
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    pthread_mutex_lock(&mMutex);

    if (metadata_buf) {
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
            CAM_INTF_META_PENDING_REQUESTS, metadata);
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;

        if (!frame_number_valid) {
            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
            mMetadataChannel->bufDone(metadata_buf);
            goto done_metadata;
        }
        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
                frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is metadata drop, how do we derive the timestamp of
            //dropped frames? For now, we fake the dropped timestamp by subtracting
            //from the reported timestamp
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time, i->request_id);
                if (i->blob_request) {
                    //If it is a blob request then send the metadata to the picture channel
                    mPictureChannel->queueMetadata(metadata_buf);

                } else {
                    // Return metadata buffer
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                if (!result_buffers) {
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }


done_metadata:
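        // Only unblock processCaptureRequest() when no stream has all of its
        // max_buffers outstanding and the backend reports no pending requests.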
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued && !pending_requests) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
            i++;
        }
        if (i == mPendingRequestsList.end()) {
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            ALOGV("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                                __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
    pthread_mutex_unlock(&mMutex);
    return;
}

/*===========================================================================
 * FUNCTION   : translateCbMetadataToResultMetadata
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
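    /* Face data is flattened for the Android tags: 4 ints per face rectangle
     * and 6 ints (three x,y landmark points) per face. */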
    cam_face_detection_data_t *faceDetectionInfo = (cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
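    /* Region tags are packed as (xmin, ymin, xmax, ymax, weight);
     * convertToRegions() handles the cam_area_t translation. */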
1253    cam_area_t  *hAeRegions =
1254        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1255    int32_t aeRegions[5];
1256    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1257    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1258   if(mIsZslMode) {
1259        uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
1260        camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
1261    } else {
1262        uint8_t *ae_state =
1263            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1264        camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1265   }
1266    uint8_t  *focusMode =
1267        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1268    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1269
1270    /*af regions*/
1271    cam_area_t  *hAfRegions =
1272        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1273    int32_t afRegions[5];
1274    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1275    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1276
1277    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1278    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1279
1280    int32_t  *afTriggerId =
1281        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1282    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1283
1284    uint8_t  *whiteBalance =
1285        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1286    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1287
1288    /*awb regions*/
1289    cam_area_t  *hAwbRegions =
1290        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1291    int32_t awbRegions[5];
1292    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1293    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1294
1295    uint8_t  *whiteBalanceState =
1296        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1297    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1298
1299    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1300    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1301
1302    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
1303    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1304
1305    uint8_t  *flashPower =
1306        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1307    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1308
1309    int64_t  *flashFiringTime =
1310        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1311    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1312
1313    /*int32_t  *ledMode =
1314      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1315      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1316
1317    uint8_t  *flashState =
1318        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1319    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1320
1321    uint8_t  *hotPixelMode =
1322        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1323    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1324
1325    float  *lensAperture =
1326        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1327    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1328
1329    float  *filterDensity =
1330        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1331    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1332
1333    float  *focalLength =
1334        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1335    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1336
1337    float  *focusDistance =
1338        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1339    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1340
1341    float  *focusRange =
1342        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1343    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1344
1345    uint8_t  *opticalStab =
1346        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1347    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1348
1349    /*int32_t  *focusState =
1350      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1351      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1352
1353    uint8_t  *noiseRedMode =
1354        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1355    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1356
1357    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1358
1359    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1360        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1361    int32_t scalerCropRegion[4];
1362    scalerCropRegion[0] = hScalerCropRegion->left;
1363    scalerCropRegion[1] = hScalerCropRegion->top;
1364    scalerCropRegion[2] = hScalerCropRegion->width;
1365    scalerCropRegion[3] = hScalerCropRegion->height;
1366    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1367
1368    int64_t  *sensorExpTime =
1369        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1370    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1371
1372    int64_t  *sensorFrameDuration =
1373        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1374    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
1375
1376    int32_t  *sensorSensitivity =
1377        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1378    mMetadataResponse.iso_speed = *sensorSensitivity;
1379    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1380
1381    uint8_t  *shadingMode =
1382        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1383    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1384
1385    uint8_t  *faceDetectMode =
1386        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1387    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
1388
1389    uint8_t  *histogramMode =
1390        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1391    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1392
1393    uint8_t  *sharpnessMapMode =
1394        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1395    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1396            sharpnessMapMode, 1);
1397
1398    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1399    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1400        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1401    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1402            (int32_t*)sharpnessMap->sharpness,
1403            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1404
1405    resultMetadata = camMetadata.release();
1406    return resultMetadata;
1407}
1408
1409/*===========================================================================
1410 * FUNCTION   : convertToRegions
1411 *
1412 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1413 *
1414 * PARAMETERS :
1415 *   @rect   : cam_rect_t struct to convert
1416 *   @region : int32_t destination array
1417 *   @weight : if we are converting from cam_area_t, weight is valid
1418 *             else weight = -1
1419 *
1420 *==========================================================================*/
1421void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1422    region[0] = rect.left;
1423    region[1] = rect.top;
1424    region[2] = rect.left + rect.width;
1425    region[3] = rect.top + rect.height;
1426    if (weight > -1) {
1427        region[4] = weight;
1428    }
1429}
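/* Example added by the editor (illustrative only, not part of the HAL build):
 * a hypothetical 100x100 AF window at (10, 20) with weight 1 maps to the
 * framework layout [x_min, y_min, x_max, y_max, weight]:
 *
 *   cam_rect_t rect;
 *   rect.left = 10; rect.top = 20; rect.width = 100; rect.height = 100;
 *   int32_t region[5];
 *   convertToRegions(rect, region, 1);   // region = {10, 20, 110, 120, 1}
 */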
1430
1431/*===========================================================================
1432 * FUNCTION   : convertFromRegions
1433 *
1434 * DESCRIPTION: helper method to convert a framework region array in the
1435 *              request settings into a cam_area_t (rect and weight)
1436 *
1437 * PARAMETERS :
1438 *   @roi      : destination cam_area_t struct
1439 *   @settings : frame settings metadata from the framework
1440 *   @tag      : metadata tag holding [x_min, y_min, x_max, y_max, weight]
1441 *
1442 *==========================================================================*/
1443void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1444                                                   const camera_metadata_t *settings,
1445                                                   uint32_t tag){
1446    CameraMetadata frame_settings;
1447    frame_settings = settings;
1448    int32_t x_min = frame_settings.find(tag).data.i32[0];
1449    int32_t y_min = frame_settings.find(tag).data.i32[1];
1450    int32_t x_max = frame_settings.find(tag).data.i32[2];
1451    int32_t y_max = frame_settings.find(tag).data.i32[3];
1452    roi->weight = frame_settings.find(tag).data.i32[4];
1453    roi->rect.left = x_min;
1454    roi->rect.top = y_min;
1455    roi->rect.width = x_max - x_min;
1456    roi->rect.height = y_max - y_min;
1457}
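/* Editor's sketch (illustrative only): reading back a hypothetical
 * ANDROID_CONTROL_AF_REGIONS entry of {10, 20, 110, 120, 1} reverses
 * convertToRegions():
 *
 *   cam_area_t roi;
 *   convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
 *   // roi.rect.left = 10, roi.rect.top = 20,
 *   // roi.rect.width = 110 - 10 = 100, roi.rect.height = 120 - 20 = 100,
 *   // roi.weight = 1
 */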
1458
1459/*===========================================================================
1460 * FUNCTION   : resetIfNeededROI
1461 *
1462 * DESCRIPTION: helper method to clamp the roi to the scaler crop region;
1463 *              returns false if the roi does not overlap the crop region
1464 *
1465 * PARAMETERS :
1466 *   @roi       : cam_area_t struct to resize
1467 *   @scalerCropRegion : cam_crop_region_t region to compare against
1468 *
1469 *
1470 *==========================================================================*/
1471bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1472                                                 const cam_crop_region_t* scalerCropRegion)
1473{
1474    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1475    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1476    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
1477    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
1478    if ((roi_x_max < scalerCropRegion->left) ||
1479        (roi_y_max < scalerCropRegion->top)  ||
1480        (roi->rect.left > crop_x_max) ||
1481        (roi->rect.top > crop_y_max)){
1482        return false;
1483    }
1484    if (roi->rect.left < scalerCropRegion->left) {
1485        roi->rect.left = scalerCropRegion->left;
1486    }
1487    if (roi->rect.top < scalerCropRegion->top) {
1488        roi->rect.top = scalerCropRegion->top;
1489    }
1490    if (roi_x_max > crop_x_max) {
1491        roi_x_max = crop_x_max;
1492    }
1493    if (roi_y_max > crop_y_max) {
1494        roi_y_max = crop_y_max;
1495    }
1496    roi->rect.width = roi_x_max - roi->rect.left;
1497    roi->rect.height = roi_y_max - roi->rect.top;
1498    return true;
1499}
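/* Editor's worked example (hypothetical values): with a crop region of
 * (left=0, top=0, width=1000, height=750), an roi of
 * (left=900, top=700, width=200, height=100) is clamped in place to
 * (left=900, top=700, width=100, height=50) and the function returns true.
 * An roi that lies completely outside the crop region returns false and is
 * left untouched.
 */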
1500
1501/*===========================================================================
1502 * FUNCTION   : convertLandmarks
1503 *
1504 * DESCRIPTION: helper method to extract the landmarks from face detection info
1505 *
1506 * PARAMETERS :
1507 *   @face   : cam_rect_t struct to convert
1508 *   @landmarks : int32_t destination array
1509 *
1510 *
1511 *==========================================================================*/
1512void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1513{
1514    landmarks[0] = face.left_eye_center.x;
1515    landmarks[1] = face.left_eye_center.y;
1516    landmarks[2] = face.right_eye_center.x;
1517    landmarks[3] = face.right_eye_center.y;
1518    landmarks[4] = face.mouth_center.x;
1519    landmarks[5] = face.mouth_center.y;
1520}
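/* Editor's note (illustrative): the resulting array follows the layout
 * {left_eye_x, left_eye_y, right_eye_x, right_eye_y, mouth_x, mouth_y},
 * so callers must supply at least a 6-element int32_t buffer.
 */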
1521
1522#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1523/*===========================================================================
1524 * FUNCTION   : initCapabilities
1525 *
1526 * DESCRIPTION: initialize camera capabilities in static data struct
1527 *
1528 * PARAMETERS :
1529 *   @cameraId  : camera Id
1530 *
1531 * RETURN     : int32_t type of status
1532 *              NO_ERROR  -- success
1533 *              non-zero failure code
1534 *==========================================================================*/
1535int QCamera3HardwareInterface::initCapabilities(int cameraId)
1536{
1537    int rc = 0;
1538    mm_camera_vtbl_t *cameraHandle = NULL;
1539    QCamera3HeapMemory *capabilityHeap = NULL;
1540
1541    cameraHandle = camera_open(cameraId);
1542    if (!cameraHandle) {
1543        ALOGE("%s: camera_open failed", __func__);
1544        rc = -1;
1545        goto open_failed;
1546    }
1547
1548    capabilityHeap = new QCamera3HeapMemory();
1549    if (capabilityHeap == NULL) {
1550        ALOGE("%s: creation of capabilityHeap failed", __func__);
1551        goto heap_creation_failed;
1552    }
1553    /* Allocate memory for capability buffer */
1554    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1555    if(rc != OK) {
1556        ALOGE("%s: No memory for capability", __func__);
1557        goto allocate_failed;
1558    }
1559
1560    /* Map memory for capability buffer */
1561    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1562    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1563                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1564                                capabilityHeap->getFd(0),
1565                                sizeof(cam_capability_t));
1566    if(rc < 0) {
1567        ALOGE("%s: failed to map capability buffer", __func__);
1568        goto map_failed;
1569    }
1570
1571    /* Query Capability */
1572    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1573    if(rc < 0) {
1574        ALOGE("%s: failed to query capability",__func__);
1575        goto query_failed;
1576    }
1577    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1578    if (!gCamCapability[cameraId]) {
1579        ALOGE("%s: out of memory", __func__);
1580        goto query_failed;
1581    }
1582    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1583                                        sizeof(cam_capability_t));
1584    rc = 0;
1585
1586query_failed:
1587    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1588                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1589map_failed:
1590    capabilityHeap->deallocate();
1591allocate_failed:
1592    delete capabilityHeap;
1593heap_creation_failed:
1594    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1595    cameraHandle = NULL;
1596open_failed:
1597    return rc;
1598}
1599
1600/*===========================================================================
1601 * FUNCTION   : initParameters
1602 *
1603 * DESCRIPTION: initialize camera parameters
1604 *
1605 * PARAMETERS :
1606 *
1607 * RETURN     : int32_t type of status
1608 *              NO_ERROR  -- success
1609 *              non-zero failure code
1610 *==========================================================================*/
1611int QCamera3HardwareInterface::initParameters()
1612{
1613    int rc = 0;
1614
1615    //Allocate Set Param Buffer
1616    mParamHeap = new QCamera3HeapMemory();
1617    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1618    if(rc != OK) {
1619        rc = NO_MEMORY;
1620        ALOGE("Failed to allocate SETPARM Heap memory");
1621        delete mParamHeap;
1622        mParamHeap = NULL;
1623        return rc;
1624    }
1625
1626    //Map memory for parameters buffer
1627    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1628            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1629            mParamHeap->getFd(0),
1630            sizeof(parm_buffer_t));
1631    if(rc < 0) {
1632        ALOGE("%s:failed to map SETPARM buffer",__func__);
1633        rc = FAILED_TRANSACTION;
1634        mParamHeap->deallocate();
1635        delete mParamHeap;
1636        mParamHeap = NULL;
1637        return rc;
1638    }
1639
1640    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1641    return rc;
1642}
1643
1644/*===========================================================================
1645 * FUNCTION   : deinitParameters
1646 *
1647 * DESCRIPTION: de-initialize camera parameters
1648 *
1649 * PARAMETERS :
1650 *
1651 * RETURN     : NONE
1652 *==========================================================================*/
1653void QCamera3HardwareInterface::deinitParameters()
1654{
1655    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1656            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1657
1658    mParamHeap->deallocate();
1659    delete mParamHeap;
1660    mParamHeap = NULL;
1661
1662    mParameters = NULL;
1663}
1664
1665/*===========================================================================
1666 * FUNCTION   : calcMaxJpegSize
1667 *
1668 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1669 *
1670 * PARAMETERS :
1671 *
1672 * RETURN     : max_jpeg_size
1673 *==========================================================================*/
1674int QCamera3HardwareInterface::calcMaxJpegSize()
1675{
1676    int32_t max_jpeg_size = 0;
1677    int temp_width, temp_height;
1678    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1679        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1680        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1681        if (temp_width * temp_height > max_jpeg_size ) {
1682            max_jpeg_size = temp_width * temp_height;
1683        }
1684    }
1685    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1686    return max_jpeg_size;
1687}
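/* Editor's worked example (hypothetical sensor): for a largest picture size of
 * 4208x3120, max_jpeg_size = 4208 * 3120 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 * = 19,693,440 bytes plus the blob descriptor; i.e. the worst-case YUV 4:2:0
 * footprint is used as an upper bound for the compressed JPEG plus its
 * trailing camera3_jpeg_blob_t header.
 */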
1688
1689/*===========================================================================
1690 * FUNCTION   : initStaticMetadata
1691 *
1692 * DESCRIPTION: initialize the static metadata
1693 *
1694 * PARAMETERS :
1695 *   @cameraId  : camera Id
1696 *
1697 * RETURN     : int32_t type of status
1698 *              0  -- success
1699 *              non-zero failure code
1700 *==========================================================================*/
1701int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1702{
1703    int rc = 0;
1704    CameraMetadata staticInfo;
1705
1706    /* android.info: hardware level */
1707    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
1708    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1709        &supportedHardwareLevel, 1);
1710
1711    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1712    /*HAL 3 only*/
1713    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1714                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1715
1716    /*hard coded for now but this should come from sensor*/
1717    float min_focus_distance;
1718    if(facingBack){
1719        min_focus_distance = 10;
1720    } else {
1721        min_focus_distance = 0;
1722    }
1723    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1724                    &min_focus_distance, 1);
1725
1726    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1727                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1728
1729    /*should be using focal lengths but sensor doesn't provide that info now*/
1730    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1731                      &gCamCapability[cameraId]->focal_length,
1732                      1);
1733
1734    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1735                      gCamCapability[cameraId]->apertures,
1736                      gCamCapability[cameraId]->apertures_count);
1737
1738    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1739                gCamCapability[cameraId]->filter_densities,
1740                gCamCapability[cameraId]->filter_densities_count);
1741
1742
1743    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1744                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1745                      gCamCapability[cameraId]->optical_stab_modes_count);
1746
1747    staticInfo.update(ANDROID_LENS_POSITION,
1748                      gCamCapability[cameraId]->lens_position,
1749                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1750
1751    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1752                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1753    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1754                      lens_shading_map_size,
1755                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1756
1757    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1758                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1759    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1760            geo_correction_map_size,
1761            sizeof(geo_correction_map_size)/sizeof(int32_t));
1762
1763    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1764                       gCamCapability[cameraId]->geo_correction_map,
1765                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1766
1767    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1768            gCamCapability[cameraId]->sensor_physical_size, 2);
1769
1770    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1771            gCamCapability[cameraId]->exposure_time_range, 2);
1772
1773    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1774            &gCamCapability[cameraId]->max_frame_duration, 1);
1775
1776
1777    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1778                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1779
1780    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1781                                               gCamCapability[cameraId]->pixel_array_size.height};
1782    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1783                      pixel_array_size, 2);
1784
1785    int32_t active_array_size[] = {0, 0,
1786                                                gCamCapability[cameraId]->active_array_size.width,
1787                                                gCamCapability[cameraId]->active_array_size.height};
1788    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1789                      active_array_size, 4);
1790
1791    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1792            &gCamCapability[cameraId]->white_level, 1);
1793
1794    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1795            gCamCapability[cameraId]->black_level_pattern, 4);
1796
1797    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1798                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1799
1800    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1801                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1802
1803    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1804                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1805    /*hardcode 0 for now*/
1806    int32_t max_face_count = 0;
1807    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1808                      &max_face_count, 1);
1809
1810    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1811                      &gCamCapability[cameraId]->histogram_size, 1);
1812
1813    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1814            &gCamCapability[cameraId]->max_histogram_count, 1);
1815
1816    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1817                                                gCamCapability[cameraId]->sharpness_map_size.height};
1818
1819    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1820            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1821
1822    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1823            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1824
1825
1826    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1827                      &gCamCapability[cameraId]->raw_min_duration,
1828                       1);
1829
1830    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
1831                                                HAL_PIXEL_FORMAT_BLOB};
1832    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
1833    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1834                      scalar_formats,
1835                      scalar_formats_count);
1836
1837    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
1838    makeTable(gCamCapability[cameraId]->supported_sizes_tbl,
1839              gCamCapability[cameraId]->supported_sizes_tbl_cnt,
1840              available_processed_sizes);
1841    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1842                available_processed_sizes,
1843                (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2);
1844    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1845                      &gCamCapability[cameraId]->min_duration[0],
1846                      gCamCapability[cameraId]->supported_sizes_tbl_cnt);
1847
1848    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1849    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1850                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1851                 available_fps_ranges);
1852    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1853            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1854
1855    camera_metadata_rational exposureCompensationStep = {
1856            gCamCapability[cameraId]->exp_compensation_step.numerator,
1857            gCamCapability[cameraId]->exp_compensation_step.denominator};
1858    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1859                      &exposureCompensationStep, 1);
1860
1861    /*TO DO*/
1862    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1863    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1864                      availableVstabModes, sizeof(availableVstabModes));
1865
1866    /*HAL 1 and HAL 3 common*/
1867    float maxZoom = 4;
1868    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1869            &maxZoom, 1);
1870
1871    int32_t max3aRegions = 1;
1872    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1873            &max3aRegions, 1);
1874
1875    uint8_t availableFaceDetectModes[] = {
1876            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1877    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1878                      availableFaceDetectModes,
1879                      sizeof(availableFaceDetectModes));
1880
1881    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1882                                       gCamCapability[cameraId]->raw_dim.height};
1883    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1884                      raw_size,
1885                      sizeof(raw_size)/sizeof(uint32_t));
1886
1887    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1888                                                        gCamCapability[cameraId]->exposure_compensation_max};
1889    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1890            exposureCompensationRange,
1891            sizeof(exposureCompensationRange)/sizeof(int32_t));
1892
1893    uint8_t lensFacing = (facingBack) ?
1894            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1895    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1896
1897    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
1898    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1899              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1900              available_jpeg_sizes);
1901    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1902                available_jpeg_sizes,
1903                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1904
1905    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1906                      available_thumbnail_sizes,
1907                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1908
1909    int32_t max_jpeg_size = 0;
1910    int temp_width, temp_height;
1911    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1912        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1913        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1914        if (temp_width * temp_height > max_jpeg_size ) {
1915            max_jpeg_size = temp_width * temp_height;
1916        }
1917    }
1918    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1919    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1920                      &max_jpeg_size, 1);
1921
1922    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1923    int32_t size = 0;
1924    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1925        int val = lookupFwkName(EFFECT_MODES_MAP,
1926                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1927                                   gCamCapability[cameraId]->supported_effects[i]);
1928        if (val != NAME_NOT_FOUND) {
1929            avail_effects[size] = (uint8_t)val;
1930            size++;
1931        }
1932    }
1933    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1934                      avail_effects,
1935                      size);
1936
1937    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1938    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1939    int32_t supported_scene_modes_cnt = 0;
1940    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1941        int val = lookupFwkName(SCENE_MODES_MAP,
1942                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1943                                gCamCapability[cameraId]->supported_scene_modes[i]);
1944        if (val != NAME_NOT_FOUND) {
1945            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1946            supported_indexes[supported_scene_modes_cnt] = i;
1947            supported_scene_modes_cnt++;
1948        }
1949    }
1950
1951    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1952                      avail_scene_modes,
1953                      supported_scene_modes_cnt);
1954
1955    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1956    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1957                      supported_scene_modes_cnt,
1958                      scene_mode_overrides,
1959                      supported_indexes,
1960                      cameraId);
1961    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
1962                      scene_mode_overrides,
1963                      supported_scene_modes_cnt*3);
1964
1965    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
1966    size = 0;
1967    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
1968        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
1969                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
1970                                 gCamCapability[cameraId]->supported_antibandings[i]);
1971        if (val != NAME_NOT_FOUND) {
1972            avail_antibanding_modes[size] = (uint8_t)val;
1973            size++;
1974        }
1975
1976    }
1977    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1978                      avail_antibanding_modes,
1979                      size);
1980
1981    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
1982    size = 0;
1983    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
1984        int val = lookupFwkName(FOCUS_MODES_MAP,
1985                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
1986                                gCamCapability[cameraId]->supported_focus_modes[i]);
1987        if (val != NAME_NOT_FOUND) {
1988            avail_af_modes[size] = (uint8_t)val;
1989            size++;
1990        }
1991    }
1992    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1993                      avail_af_modes,
1994                      size);
1995
1996    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
1997    size = 0;
1998    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
1999        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2000                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2001                                    gCamCapability[cameraId]->supported_white_balances[i]);
2002        if (val != NAME_NOT_FOUND) {
2003            avail_awb_modes[size] = (uint8_t)val;
2004            size++;
2005        }
2006    }
2007    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2008                      avail_awb_modes,
2009                      size);
2010
2011    uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX];
2012    size = 0;
2013    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) {
2014        int val = lookupFwkName(FLASH_MODES_MAP,
2015                                sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
2016                                gCamCapability[cameraId]->supported_flash_modes[i]);
2017        if (val != NAME_NOT_FOUND) {
2018            avail_flash_modes[size] = (uint8_t)val;
2019            size++;
2020        }
2021    }
2022    uint8_t flashAvailable = 0;
2023    if (size > 1) {
2024        //flash is supported
2025        flashAvailable = 1;
2026    }
2027    staticInfo.update(ANDROID_FLASH_MODE,
2028                      avail_flash_modes,
2029                      size);
2030
2031    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2032            &flashAvailable, 1);
2033
2034    uint8_t avail_ae_modes[5];
2035    size = 0;
2036    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2037        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2038        size++;
2039    }
2040    if (flashAvailable) {
2041        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2042        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2043        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2044    }
2045    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2046                      avail_ae_modes,
2047                      size);
2048
2049    int32_t sensitivity_range[2];
2050    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2051    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2052    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2053                      sensitivity_range,
2054                      sizeof(sensitivity_range) / sizeof(int32_t));
2055
2056    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2057                      &gCamCapability[cameraId]->max_analog_sensitivity,
2058                      1);
2059    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2060                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2061                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2062
2063    gStaticMetadata[cameraId] = staticInfo.release();
2064    return rc;
2065}
2066
2067/*===========================================================================
2068 * FUNCTION   : makeTable
2069 *
2070 * DESCRIPTION: make a table of sizes
2071 *
2072 * PARAMETERS :
2073 *
2074 *
2075 *==========================================================================*/
2076void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2077                                          int32_t* sizeTable)
2078{
2079    int j = 0;
2080    for (int i = 0; i < size; i++) {
2081        sizeTable[j] = dimTable[i].width;
2082        sizeTable[j+1] = dimTable[i].height;
2083        j+=2;
2084    }
2085}
2086
2087/*===========================================================================
2088 * FUNCTION   : makeFPSTable
2089 *
2090 * DESCRIPTION: make a table of fps ranges
2091 *
2092 * PARAMETERS :
2093 *
2094 *==========================================================================*/
2095void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2096                                          int32_t* fpsRangesTable)
2097{
2098    int j = 0;
2099    for (int i = 0; i < size; i++) {
2100        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2101        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2102        j+=2;
2103    }
2104}
2105
2106/*===========================================================================
2107 * FUNCTION   : makeOverridesList
2108 *
2109 * DESCRIPTION: make a list of scene mode overrides
2110 *
2111 * PARAMETERS :
2112 *
2113 *
2114 *==========================================================================*/
2115void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2116                                                  uint8_t size, uint8_t* overridesList,
2117                                                  uint8_t* supported_indexes,
2118                                                  int camera_id)
2119{
2120    /*daemon will give a list of overrides for all scene modes.
2121      However we should send the fwk only the overrides for the scene modes
2122      supported by the framework*/
2123    int j = 0, index = 0, supt = 0;
2124    uint8_t focus_override;
2125    for (int i = 0; i < size; i++) {
2126        supt = 0;
2127        index = supported_indexes[i];
2128        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
2129        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2130                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2131                                                    overridesTable[index].awb_mode);
2132        focus_override = (uint8_t)overridesTable[index].af_mode;
2133        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2134           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2135              supt = 1;
2136              break;
2137           }
2138        }
2139        if (supt) {
2140           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2141                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2142                                              focus_override);
2143        } else {
2144           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2145        }
2146        j+=3;
2147    }
2148}
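/* Editor's illustration (hypothetical data): for two supported scene modes the
 * resulting overridesList is a flat triplet-per-mode array,
 * { ae_mode0, awb_mode0, af_mode0, ae_mode1, awb_mode1, af_mode1 },
 * which matches the supported_scene_modes_cnt * 3 count passed to
 * staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, ...) above.
 */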
2149
2150/*===========================================================================
2151 * FUNCTION   : getScalarFormat
2152 *
2153 * DESCRIPTION: convert the backend format to a type recognized by the framework
2154 *
2155 * PARAMETERS : format : the format from backend
2156 *
2157 * RETURN     : format recognized by framework
2158 *
2159 *==========================================================================*/
2160int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2161{
2162    int32_t halPixelFormat;
2163
2164    switch (format) {
2165    case CAM_FORMAT_YUV_420_NV12:
2166        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2167        break;
2168    case CAM_FORMAT_YUV_420_NV21:
2169        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2170        break;
2171    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2172        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2173        break;
2174    case CAM_FORMAT_YUV_420_YV12:
2175        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2176        break;
2177    case CAM_FORMAT_YUV_422_NV16:
2178    case CAM_FORMAT_YUV_422_NV61:
2179    default:
2180        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2181        break;
2182    }
2183    return halPixelFormat;
2184}
2185
2186/*===========================================================================
2187 * FUNCTION   : getSensorSensitivity
2188 *
2189 * DESCRIPTION: convert iso_mode to an integer value
2190 *
2191 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2192 *
2193 * RETURN     : sensitivity supported by sensor
2194 *
2195 *==========================================================================*/
2196int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2197{
2198    int32_t sensitivity;
2199
2200    switch (iso_mode) {
2201    case CAM_ISO_MODE_100:
2202        sensitivity = 100;
2203        break;
2204    case CAM_ISO_MODE_200:
2205        sensitivity = 200;
2206        break;
2207    case CAM_ISO_MODE_400:
2208        sensitivity = 400;
2209        break;
2210    case CAM_ISO_MODE_800:
2211        sensitivity = 800;
2212        break;
2213    case CAM_ISO_MODE_1600:
2214        sensitivity = 1600;
2215        break;
2216    default:
2217        sensitivity = -1;
2218        break;
2219    }
2220    return sensitivity;
2221}
2222
2223
2224/*===========================================================================
2225 * FUNCTION   : AddSetParmEntryToBatch
2226 *
2227 * DESCRIPTION: add set parameter entry into batch
2228 *
2229 * PARAMETERS :
2230 *   @p_table     : ptr to parameter buffer
2231 *   @paramType   : parameter type
2232 *   @paramLength : length of parameter value
2233 *   @paramValue  : ptr to parameter value
2234 *
2235 * RETURN     : int32_t type of status
2236 *              NO_ERROR  -- success
2237 *              non-zero failure code
2238 *==========================================================================*/
2239int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2240                                                          cam_intf_parm_type_t paramType,
2241                                                          uint32_t paramLength,
2242                                                          void *paramValue)
2243{
2244    int position = paramType;
2245    int current, next;
2246
2247    /*************************************************************************
2248    *                 Code to take care of linking next flags                *
2249    *************************************************************************/
2250    current = GET_FIRST_PARAM_ID(p_table);
2251    if (position == current){
2252        //DO NOTHING
2253    } else if (position < current){
2254        SET_NEXT_PARAM_ID(position, p_table, current);
2255        SET_FIRST_PARAM_ID(p_table, position);
2256    } else {
2257        /* Search for the position in the linked list where we need to slot in*/
2258        while (position > GET_NEXT_PARAM_ID(current, p_table))
2259            current = GET_NEXT_PARAM_ID(current, p_table);
2260
2261        /*If node already exists no need to alter linking*/
2262        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2263            next = GET_NEXT_PARAM_ID(current, p_table);
2264            SET_NEXT_PARAM_ID(current, p_table, position);
2265            SET_NEXT_PARAM_ID(position, p_table, next);
2266        }
2267    }
2268
2269    /*************************************************************************
2270    *                   Copy contents into entry                             *
2271    *************************************************************************/
2272
2273    if (paramLength > sizeof(parm_type_t)) {
2274        ALOGE("%s:Size of input larger than max entry size",__func__);
2275        return BAD_VALUE;
2276    }
2277    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2278    return NO_ERROR;
2279}
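/* Editor's sketch of how callers use the batch (see setFrameParameters below):
 * entries are kept in a sorted singly linked list keyed by parameter id, and
 * re-inserting an existing id leaves the links unchanged.
 *
 *   int32_t hal_version = CAM_HAL_V3;
 *   mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;     // empty batch
 *   AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
 *                          sizeof(hal_version), &hal_version);
 *   mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
 */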
2280
2281/*===========================================================================
2282 * FUNCTION   : lookupFwkName
2283 *
2284 * DESCRIPTION: In case the enum is not the same in fwk and backend,
2285 *              make sure the parameter is correctly propagated
2286 *
2287 * PARAMETERS  :
2288 *   @arr      : map between the two enums
2289 *   @len      : len of the map
2290 *   @hal_name : name of the hal_parm to map
2291 *
2292 * RETURN     : int type of status
2293 *              fwk_name  -- success
2294 *              non-zero failure code
2295 *==========================================================================*/
2296int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2297                                             int len, int hal_name)
2298{
2299
2300    for (int i = 0; i < len; i++) {
2301        if (arr[i].hal_name == hal_name)
2302            return arr[i].fwk_name;
2303    }
2304
2305    /* Not being able to find a matching framework type is not necessarily
2306     * an error case. This happens when mm-camera supports more attributes
2307     * than the framework does */
2308    ALOGD("%s: Cannot find matching framework type", __func__);
2309    return NAME_NOT_FOUND;
2310}
2311
2312/*===========================================================================
2313 * FUNCTION   : lookupHalName
2314 *
2315 * DESCRIPTION: In case the enum is not the same in fwk and backend,
2316 *              make sure the parameter is correctly propagated
2317 *
2318 * PARAMETERS  :
2319 *   @arr      : map between the two enums
2320 *   @len      : len of the map
2321 *   @fwk_name : name of the fwk_parm to map
2322 *
2323 * RETURN     : int32_t type of status
2324 *              hal_name  -- success
2325 *              non-zero failure code
2326 *==========================================================================*/
2327int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2328                                             int len, int fwk_name)
2329{
2330    for (int i = 0; i < len; i++) {
2331       if (arr[i].fwk_name == fwk_name)
2332           return arr[i].hal_name;
2333    }
2334    ALOGE("%s: Cannot find matching hal type", __func__);
2335    return NAME_NOT_FOUND;
2336}
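/* Editor's illustration: the two lookups are inverses over the same map, e.g.
 * using the WHITE_BALANCE_MODES_MAP table defined earlier in this file:
 *
 *   int8_t hal = lookupHalName(WHITE_BALANCE_MODES_MAP,
 *                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
 *                   ANDROID_CONTROL_AWB_MODE_AUTO);   // -> CAM_WB_MODE_AUTO
 *   int8_t fwk = lookupFwkName(WHITE_BALANCE_MODES_MAP,
 *                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
 *                   CAM_WB_MODE_AUTO);                // -> ANDROID_CONTROL_AWB_MODE_AUTO
 *   // Either lookup returns NAME_NOT_FOUND when no entry matches.
 */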
2337
2338/*===========================================================================
2339 * FUNCTION   : getCamInfo
2340 *
2341 * DESCRIPTION: query camera capabilities
2342 *
2343 * PARAMETERS :
2344 *   @cameraId  : camera Id
2345 *   @info      : camera info struct to be filled in with camera capabilities
2346 *
2347 * RETURN     : int32_t type of status
2348 *              NO_ERROR  -- success
2349 *              non-zero failure code
2350 *==========================================================================*/
2351int QCamera3HardwareInterface::getCamInfo(int cameraId,
2352                                    struct camera_info *info)
2353{
2354    int rc = 0;
2355
2356    if (NULL == gCamCapability[cameraId]) {
2357        rc = initCapabilities(cameraId);
2358        if (rc < 0) {
2359            //pthread_mutex_unlock(&g_camlock);
2360            return rc;
2361        }
2362    }
2363
2364    if (NULL == gStaticMetadata[cameraId]) {
2365        rc = initStaticMetadata(cameraId);
2366        if (rc < 0) {
2367            return rc;
2368        }
2369    }
2370
2371    switch(gCamCapability[cameraId]->position) {
2372    case CAM_POSITION_BACK:
2373        info->facing = CAMERA_FACING_BACK;
2374        break;
2375
2376    case CAM_POSITION_FRONT:
2377        info->facing = CAMERA_FACING_FRONT;
2378        break;
2379
2380    default:
2381        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2382        rc = -1;
2383        break;
2384    }
2385
2386
2387    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2388    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2389    info->static_camera_characteristics = gStaticMetadata[cameraId];
2390
2391    return rc;
2392}
2393
2394/*===========================================================================
2395 * FUNCTION   : translateCapabilityToMetadata
2396 *
2397 * DESCRIPTION: translate the capability into default request metadata
2398 *
2399 * PARAMETERS : @type : type of the request template
2400 *
2401 *
2402 * RETURN     : success: camera_metadata_t*
2403 *              failure: NULL
2404 *
2405 *==========================================================================*/
2406camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2407{
2408    pthread_mutex_lock(&mMutex);
2409
2410    if (mDefaultMetadata[type] != NULL) {
2411        pthread_mutex_unlock(&mMutex);
2412        return mDefaultMetadata[type];
2413    }
2414    //first time we are handling this request
2415    //fill up the metadata structure using the wrapper class
2416    CameraMetadata settings;
2417    //translate from cam_capability_t to camera_metadata_tag_t
2418    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2419    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2420
2421    /*control*/
2422
2423    uint8_t controlIntent = 0;
2424    switch (type) {
2425      case CAMERA3_TEMPLATE_PREVIEW:
2426        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2427        break;
2428      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2429        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2430        break;
2431      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2432        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2433        break;
2434      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2435        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2436        break;
2437      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2438        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2439        break;
2440      default:
2441        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2442        break;
2443    }
2444    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2445
2446    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2447            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2448
2449    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2450    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2451
2452    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2453    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2454
2455    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2456    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2457
2458    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2459    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2460
2461    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2462    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2463
2464    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2465    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2466
2467    uint8_t focusMode;
2468    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2469        ALOGV("%s: Setting focus mode to auto", __func__);
2470        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2471    } else {
2472        ALOGV("%s: Setting focus mode to off", __func__);
2473        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2474    }
2475    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2476
2477    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2478    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2479
2480    /*flash*/
2481    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2482    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2483
2484
2485    /* lens */
2486    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2487    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2488
2489    if (gCamCapability[mCameraId]->filter_densities_count) {
2490        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2491        /* only a single default value is supplied here, so the entry count must be 1 */
2492        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 1);
2493    }
2494
2495    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2496    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2497
2498    mDefaultMetadata[type] = settings.release();
2499
2500    pthread_mutex_unlock(&mMutex);
2501    return mDefaultMetadata[type];
2502}
2503
2504/*===========================================================================
2505 * FUNCTION   : setFrameParameters
2506 *
2507 * DESCRIPTION: set parameters per frame as requested in the metadata from
2508 *              framework
2509 *
2510 * PARAMETERS :
2511 *   @settings  : frame settings information from framework
2512 *
2513 *
2514 * RETURN     : success: NO_ERROR
2515 *              failure: non-zero error code
2516 *==========================================================================*/
2517int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2518                                                  const camera_metadata_t *settings)
2519{
2520    /*translate from camera_metadata_t type to parm_type_t*/
2521    int rc = 0;
2522    if (settings == NULL && mFirstRequest) {
2523        /*settings cannot be null for the first request*/
2524        return BAD_VALUE;
2525    }
2526
2527    int32_t hal_version = CAM_HAL_V3;
2528
2529    memset(mParameters, 0, sizeof(parm_buffer_t));
2530    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2531    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2532                sizeof(hal_version), &hal_version);
2533
2534    /*we need to update the frame number in the parameters*/
2535    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2536                                sizeof(frame_id), &frame_id);
2537    if (rc < 0) {
2538        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2539        return BAD_VALUE;
2540    }
2541
2542    if(settings != NULL){
2543        rc = translateMetadataToParameters(settings);
2544    }
2545    /*set the parameters to backend*/
2546    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2547    return rc;
2548}
2549
2550/*===========================================================================
2551 * FUNCTION   : translateMetadataToParameters
2552 *
2553 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2554 *
2555 *
2556 * PARAMETERS :
2557 *   @settings  : frame settings information from framework
2558 *
2559 *
2560 * RETURN     : success: NO_ERROR
2561 *              failure: non-zero error code
2562 *==========================================================================*/
2563int QCamera3HardwareInterface::translateMetadataToParameters
2564                                  (const camera_metadata_t *settings)
2565{
2566    int rc = 0;
2567    CameraMetadata frame_settings;
2568    frame_settings = settings;
2569
2570
2571    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2572        int32_t antibandingMode =
2573            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2574        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2575                sizeof(antibandingMode), &antibandingMode);
2576    }
2577
2578    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2579        int32_t expCompensation = frame_settings.find(
2580            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2581        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2582            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2583        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2584            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2585        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2586          sizeof(expCompensation), &expCompensation);
2587    }
2588
2589    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2590        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2591        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2592                sizeof(aeLock), &aeLock);
2593    }
2594    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2595        cam_fps_range_t fps_range;
2596        fps_range.min_fps =
2597            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2598        fps_range.max_fps =
2599            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2600        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2601                sizeof(fps_range), &fps_range);
2602    }
2603
2604    float focalDistance = -1.0;
2605    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2606        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2607        rc = AddSetParmEntryToBatch(mParameters,
2608                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2609                sizeof(focalDistance), &focalDistance);
2610    }
2611
2612    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2613        uint8_t fwk_focusMode =
2614            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2615        uint8_t focusMode;
2616        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2617            focusMode = CAM_FOCUS_MODE_INFINITY;
2618        } else {
2619            focusMode = lookupHalName(FOCUS_MODES_MAP,
2620                                      sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2621                                      fwk_focusMode);
2622        }
2623        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2624                sizeof(focusMode), &focusMode);
2625    }
2626
2627    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2628        uint8_t awbLock =
2629            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2630        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2631                sizeof(awbLock), &awbLock);
2632    }
2633
2634    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2635        uint8_t fwk_whiteLevel =
2636            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2637        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2638                sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2639                fwk_whiteLevel);
2640        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2641                sizeof(whiteLevel), &whiteLevel);
2642    }
2643
2644    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2645        uint8_t fwk_effectMode =
2646            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2647        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2648                sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2649                fwk_effectMode);
2650        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2651                sizeof(effectMode), &effectMode);
2652    }
2653
2654    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2655        uint8_t fwk_aeMode =
2656            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2657        uint8_t aeMode;
2658        int32_t redeye;
2659
2660        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2661            aeMode = CAM_AE_MODE_OFF;
2662        } else {
2663            aeMode = CAM_AE_MODE_ON;
2664        }
2665        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2666            redeye = 1;
2667        } else {
2668            redeye = 0;
2669        }
2670
2671        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2672                                          sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
2673                                          fwk_aeMode);
2674        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2675                sizeof(aeMode), &aeMode);
2676        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2677                sizeof(flashMode), &flashMode);
2678        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2679                sizeof(redeye), &redeye);
2680    }
2681
2682    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2683        uint8_t colorCorrectMode =
2684            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2685        rc =
2686            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2687                    sizeof(colorCorrectMode), &colorCorrectMode);
2688    }
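    /* The AEC precapture trigger is always sent to the backend: it defaults
       to IDLE with an invalid id and is overridden below only when the
       framework supplies both the precapture trigger and its id. */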
2689    cam_trigger_t aecTrigger;
2690    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2691    aecTrigger.trigger_id = -1;
2692    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2693        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2694        aecTrigger.trigger =
2695            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2696        aecTrigger.trigger_id =
2697            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2698    }
2699    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2700                                sizeof(aecTrigger), &aecTrigger);
2701
2702    /*af_trigger must come with a trigger id*/
2703    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2704        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2705        cam_trigger_t af_trigger;
2706        af_trigger.trigger =
2707            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2708        af_trigger.trigger_id =
2709            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2710        rc = AddSetParmEntryToBatch(mParameters,
2711                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2712    }
2713
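    /* android.control.mode selects the HAL bestshot (scene) mode:
       USE_SCENE_MODE maps the framework scene mode through SCENE_MODES_MAP,
       while both OFF and AUTO disable bestshot. */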
2714    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2715        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2716        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2717                sizeof(metaMode), &metaMode);
2718        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2719           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2720           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2721                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2722                                             fwk_sceneMode);
2723           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2724                sizeof(sceneMode), &sceneMode);
2725        } else if (metaMode == ANDROID_CONTROL_MODE_OFF ||
2726                   metaMode == ANDROID_CONTROL_MODE_AUTO) {
2727           uint8_t sceneMode = 0; //CAMERA_BESTSHOT_OFF
2728           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2729                sizeof(sceneMode), &sceneMode);
2730        }
2734    }
2735
2736    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2737        int32_t demosaic =
2738            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2739        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2740                sizeof(demosaic), &demosaic);
2741    }
2742
2743    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2744        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2745        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE,
2746                sizeof(edgeMode), &edgeMode);
2747    }
2748
2749    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2750        int32_t edgeStrength =
2751            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2752        rc = AddSetParmEntryToBatch(mParameters,
2753                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2754    }
2755
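    /* android.flash.mode is honored only when AE is not already driving the
       flash (i.e. AE mode is OFF or plain ON); in the auto-flash AE modes the
       LED mode derived from the AE mapping above takes precedence. */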
2756    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2757        int32_t respectFlashMode = 1;
2758        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2759            uint8_t fwk_aeMode =
2760                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2761            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
2762                respectFlashMode = 0;
2763                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
2764                    __func__);
2765            }
2766        }
2767        if (respectFlashMode) {
2768            uint8_t flashMode =
2769            uint8_t fwk_flashMode =
2770                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2771            int32_t flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
2772                                          sizeof(FLASH_MODES_MAP),
2773                                          fwk_flashMode);
2774            // To check: CAM_INTF_META_FLASH_MODE usage
2775            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2776                          sizeof(flashMode), &flashMode);
2777        }
2778    }
2779
2780    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2781        uint8_t flashPower =
2782            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2783        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2784                sizeof(flashPower), &flashPower);
2785    }
2786
2787    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2788        int64_t flashFiringTime =
2789            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2790        rc = AddSetParmEntryToBatch(mParameters,
2791                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2792    }
2793
2794    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2795        uint8_t geometricMode =
2796            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2797        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2798                sizeof(geometricMode), &geometricMode);
2799    }
2800
2801    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2802        uint8_t geometricStrength =
2803            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2804        rc = AddSetParmEntryToBatch(mParameters,
2805                CAM_INTF_META_GEOMETRIC_STRENGTH,
2806                sizeof(geometricStrength), &geometricStrength);
2807    }
2808
2809    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2810        uint8_t hotPixelMode =
2811            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2812        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2813                sizeof(hotPixelMode), &hotPixelMode);
2814    }
2815
2816    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2817        float lensAperture =
2818            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2819        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2820                sizeof(lensAperture), &lensAperture);
2821    }
2822
2823    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2824        float filterDensity =
2825            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2826        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2827                sizeof(filterDensity), &filterDensity);
2828    }
2829
2830    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2831        float focalLength =
2832            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2833        rc = AddSetParmEntryToBatch(mParameters,
2834                CAM_INTF_META_LENS_FOCAL_LENGTH,
2835                sizeof(focalLength), &focalLength);
2836    }
2837
2838    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2839        uint8_t optStabMode =
2840            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2841        rc = AddSetParmEntryToBatch(mParameters,
2842                CAM_INTF_META_LENS_OPT_STAB_MODE,
2843                sizeof(optStabMode), &optStabMode);
2844    }
2845
2846    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2847        uint8_t noiseRedMode =
2848            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2849        rc = AddSetParmEntryToBatch(mParameters,
2850                CAM_INTF_META_NOISE_REDUCTION_MODE,
2851                sizeof(noiseRedMode), &noiseRedMode);
2852    }
2853
2854    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2855        uint8_t noiseRedStrength =
2856            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2857        rc = AddSetParmEntryToBatch(mParameters,
2858                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2859                sizeof(noiseRedStrength), &noiseRedStrength);
2860    }
2861
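    /* Remember the scaler crop region (if any) so the AE/AF/AWB regions
       handled further below can be validated against it. */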
2862    cam_crop_region_t scalerCropRegion;
2863    bool scalerCropSet = false;
2864    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2865        scalerCropRegion.left =
2866            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2867        scalerCropRegion.top =
2868            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2869        scalerCropRegion.width =
2870            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2871        scalerCropRegion.height =
2872            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2873        rc = AddSetParmEntryToBatch(mParameters,
2874                CAM_INTF_META_SCALER_CROP_REGION,
2875                sizeof(scalerCropRegion), &scalerCropRegion);
2876        scalerCropSet = true;
2877    }
2878
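    /* Manual sensor controls: frame duration and sensitivity are clamped
       below to the limits advertised in the sensor capabilities. */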
2879    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2880        int64_t sensorExpTime =
2881            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2882        rc = AddSetParmEntryToBatch(mParameters,
2883                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2884                sizeof(sensorExpTime), &sensorExpTime);
2885    }
2886
2887    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2888        int64_t sensorFrameDuration =
2889            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2890        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
2891            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
2892        rc = AddSetParmEntryToBatch(mParameters,
2893                CAM_INTF_META_SENSOR_FRAME_DURATION,
2894                sizeof(sensorFrameDuration), &sensorFrameDuration);
2895    }
2896
2897    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2898        int32_t sensorSensitivity =
2899            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2900        if (sensorSensitivity <
2901                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
2902            sensorSensitivity =
2903                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
2904        if (sensorSensitivity >
2905                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
2906            sensorSensitivity =
2907                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
2908        rc = AddSetParmEntryToBatch(mParameters,
2909                CAM_INTF_META_SENSOR_SENSITIVITY,
2910                sizeof(sensorSensitivity), &sensorSensitivity);
2911    }
2912
2913    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
2914        int32_t shadingMode =
2915            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
2916        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
2917                sizeof(shadingMode), &shadingMode);
2918    }
2919
2920    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
2921        uint8_t shadingStrength =
2922            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
2923        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
2924                sizeof(shadingStrength), &shadingStrength);
2925    }
2926
2927    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
2928        uint8_t facedetectMode =
2929            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
2930        rc = AddSetParmEntryToBatch(mParameters,
2931                CAM_INTF_META_STATS_FACEDETECT_MODE,
2932                sizeof(facedetectMode), &facedetectMode);
2933    }
2934
2935    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
2936        uint8_t histogramMode =
2937            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
2938        rc = AddSetParmEntryToBatch(mParameters,
2939                CAM_INTF_META_STATS_HISTOGRAM_MODE,
2940                sizeof(histogramMode), &histogramMode);
2941    }
2942
2943    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
2944        uint8_t sharpnessMapMode =
2945            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
2946        rc = AddSetParmEntryToBatch(mParameters,
2947                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
2948                sizeof(sharpnessMapMode), &sharpnessMapMode);
2949    }
2950
2951    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
2952        uint8_t tonemapMode =
2953            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
2954        rc = AddSetParmEntryToBatch(mParameters,
2955                CAM_INTF_META_TONEMAP_MODE,
2956                sizeof(tonemapMode), &tonemapMode);
2957    }
2958
2959    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2960        uint8_t captureIntent =
2961            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2962        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2963                sizeof(captureIntent), &captureIntent);
2964    }
2965
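    /* Metering regions: convert the framework AE/AF/AWB regions to HAL ROIs
       and, when a scaler crop region was set in this request, forward them
       only if resetIfNeededROI() accepts them. */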
2966    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
2967        cam_area_t roi;
2968        bool reset = true;
2969        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
2970        if (scalerCropSet) {
2971            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2972        }
2973        if (reset) {
2974            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
2975                    sizeof(roi), &roi);
2976        }
2977    }
2978
2979    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
2980        cam_area_t roi;
2981        bool reset = true;
2982        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
2983        if (scalerCropSet) {
2984            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2985        }
2986        if (reset) {
2987            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
2988                    sizeof(roi), &roi);
2989        }
2990    }
2991
2992    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
2993        cam_area_t roi;
2994        bool reset = true;
2995        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
2996        if (scalerCropSet) {
2997            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2998        }
2999        if (reset) {
3000            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3001                    sizeof(roi), &roi);
3002        }
3003    }
3004    return rc;
3005}
3006
3007/*===========================================================================
3008 * FUNCTION   : getJpegSettings
3009 *
3010 * DESCRIPTION: save the jpeg settings in the HAL
3011 *
3012 *
3013 * PARAMETERS :
3014 *   @settings  : frame settings information from framework
3015 *
3016 *
3017 * RETURN     : success: NO_ERROR
3018 *              failure: none (currently always returns NO_ERROR)
3019 *==========================================================================*/
3020int QCamera3HardwareInterface::getJpegSettings
3021                                  (const camera_metadata_t *settings)
3022{
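    /* Release any JPEG settings left over from the previous capture before
       parsing the new frame settings. */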
3023    if (mJpegSettings) {
3024        if (mJpegSettings->gps_timestamp) {
3025            free(mJpegSettings->gps_timestamp);
3026            mJpegSettings->gps_timestamp = NULL;
3027        }
3028        if (mJpegSettings->gps_coordinates) {
3029            for (int i = 0; i < 3; i++) {
3030                free(mJpegSettings->gps_coordinates[i]);
3031                mJpegSettings->gps_coordinates[i] = NULL;
3032            }
3033        }
3034        free(mJpegSettings);
3035        mJpegSettings = NULL;
3036    }
3037    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3038    CameraMetadata jpeg_settings;
3039    jpeg_settings = settings;
3040
3041    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3042        mJpegSettings->jpeg_orientation =
3043            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3044    } else {
3045        mJpegSettings->jpeg_orientation = 0;
3046    }
3047    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3048        mJpegSettings->jpeg_quality =
3049            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3050    } else {
3051        mJpegSettings->jpeg_quality = 85;
3052    }
3053    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3054        mJpegSettings->thumbnail_size.width =
3055            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3056        mJpegSettings->thumbnail_size.height =
3057            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3058    } else {
3059        mJpegSettings->thumbnail_size.width = 0;
3060        mJpegSettings->thumbnail_size.height = 0;
3061    }
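    /* GPS coordinates (latitude, longitude, altitude) are stored as three
       individually allocated doubles so that NULL can signal "not provided". */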
3062    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3063        for (int i = 0; i < 3; i++) {
3064            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double));
3065            *(mJpegSettings->gps_coordinates[i]) =
3066                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3067        }
3068    } else{
3069       for (int i = 0; i < 3; i++) {
3070            mJpegSettings->gps_coordinates[i] = NULL;
3071        }
3072    }
3073
3074    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3075        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t));
3076        *(mJpegSettings->gps_timestamp) =
3077            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3078    } else {
3079        mJpegSettings->gps_timestamp = NULL;
3080    }
3081
3082    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3083        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3084        for (int i = 0; i < len; i++) {
3085            mJpegSettings->gps_processing_method[i] =
3086                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3087        }
3088        if (len == 0 || mJpegSettings->gps_processing_method[len-1] != '\0') {
3089            mJpegSettings->gps_processing_method[len] = '\0';
3090        }
3091    } else {
3092        mJpegSettings->gps_processing_method[0] = '\0';
3093    }
3094
3095    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3096        mJpegSettings->sensor_sensitivity =
3097            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3098    } else {
3099        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3100    }
3101
3102    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3103        mJpegSettings->lens_focal_length =
3104            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3105    }
3106    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3107        mJpegSettings->exposure_compensation =
3108            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3109    }
3110    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3111    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3112    mJpegSettings->is_jpeg_format = true;
3113    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3114    return 0;
3115}
3116
3117/*===========================================================================
3118 * FUNCTION   : captureResultCb
3119 *
3120 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3121 *
3122 * PARAMETERS :
3123 *   @frame  : frame information from mm-camera-interface
3124 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3125 *   @userdata: userdata
3126 *
3127 * RETURN     : NONE
3128 *==========================================================================*/
3129void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3130                camera3_stream_buffer_t *buffer,
3131                uint32_t frame_number, void *userdata)
3132{
3133    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3134    if (hw == NULL) {
3135        ALOGE("%s: Invalid hw %p", __func__, hw);
3136        return;
3137    }
3138
3139    hw->captureResultCb(metadata, buffer, frame_number);
3140    return;
3141}
3142
3143
3144/*===========================================================================
3145 * FUNCTION   : initialize
3146 *
3147 * DESCRIPTION: Pass framework callback pointers to HAL
3148 *
3149 * PARAMETERS :
3150 *   @device       : camera3 device structure
3151 *   @callback_ops : callback function pointers from the framework
3152 * RETURN     : Success : 0
3153 *              Failure: -ENODEV
3154 *==========================================================================*/
3155
3156int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3157                                  const camera3_callback_ops_t *callback_ops)
3158{
3159    ALOGV("%s: E", __func__);
3160    QCamera3HardwareInterface *hw =
3161        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3162    if (!hw) {
3163        ALOGE("%s: NULL camera device", __func__);
3164        return -ENODEV;
3165    }
3166
3167    int rc = hw->initialize(callback_ops);
3168    ALOGV("%s: X", __func__);
3169    return rc;
3170}
3171
3172/*===========================================================================
3173 * FUNCTION   : configure_streams
3174 *
3175 * DESCRIPTION: Set up new output streams for the camera device as
3176 *              requested by the framework
3177 * PARAMETERS :
3178 *   @device      : camera3 device structure
3179 *   @stream_list : stream configuration requested by the framework
3180 * RETURN     : Success: 0
3181 *              Failure: -EINVAL (if stream configuration is invalid)
3182 *                       -ENODEV (fatal error)
3183 *==========================================================================*/
3184
3185int QCamera3HardwareInterface::configure_streams(
3186        const struct camera3_device *device,
3187        camera3_stream_configuration_t *stream_list)
3188{
3189    ALOGV("%s: E", __func__);
3190    QCamera3HardwareInterface *hw =
3191        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3192    if (!hw) {
3193        ALOGE("%s: NULL camera device", __func__);
3194        return -ENODEV;
3195    }
3196    int rc = hw->configureStreams(stream_list);
3197    ALOGV("%s: X", __func__);
3198    return rc;
3199}
3200
3201/*===========================================================================
3202 * FUNCTION   : register_stream_buffers
3203 *
3204 * DESCRIPTION: Register stream buffers with the device
3205 *
3206 * PARAMETERS :
3207 *   @device : camera3 device;  @buffer_set : buffers for a configured stream
3208 * RETURN     : 0 on success, negative error code (e.g. -ENODEV) on failure
3209 *==========================================================================*/
3210int QCamera3HardwareInterface::register_stream_buffers(
3211        const struct camera3_device *device,
3212        const camera3_stream_buffer_set_t *buffer_set)
3213{
3214    ALOGV("%s: E", __func__);
3215    QCamera3HardwareInterface *hw =
3216        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3217    if (!hw) {
3218        ALOGE("%s: NULL camera device", __func__);
3219        return -ENODEV;
3220    }
3221    int rc = hw->registerStreamBuffers(buffer_set);
3222    ALOGV("%s: X", __func__);
3223    return rc;
3224}
3225
3226/*===========================================================================
3227 * FUNCTION   : construct_default_request_settings
3228 *
3229 * DESCRIPTION: Configure a settings buffer to meet the required use case
3230 *
3231 * PARAMETERS :
3232 *   @device : camera3 device structure
3233 *   @type   : capture request template type
3234 * RETURN     : Success: Return valid metadata
3235 *              Failure: Return NULL
3236 *==========================================================================*/
3237const camera_metadata_t* QCamera3HardwareInterface::
3238    construct_default_request_settings(const struct camera3_device *device,
3239                                        int type)
3240{
3241
3242    ALOGV("%s: E", __func__);
3243    camera_metadata_t* fwk_metadata = NULL;
3244    QCamera3HardwareInterface *hw =
3245        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3246    if (!hw) {
3247        ALOGE("%s: NULL camera device", __func__);
3248        return NULL;
3249    }
3250
3251    fwk_metadata = hw->translateCapabilityToMetadata(type);
3252
3253    ALOGV("%s: X", __func__);
3254    return fwk_metadata;
3255}
3256
3257/*===========================================================================
3258 * FUNCTION   : process_capture_request
3259 *
3260 * DESCRIPTION: Hand a capture request from the framework over to the HAL
3261 *
3262 * PARAMETERS :
3263 *   @device  : camera3 device structure
3264 *   @request : capture request with settings and output buffers
3265 * RETURN     : 0 on success, negative error code on failure
3266 *==========================================================================*/
3267int QCamera3HardwareInterface::process_capture_request(
3268                    const struct camera3_device *device,
3269                    camera3_capture_request_t *request)
3270{
3271    ALOGV("%s: E", __func__);
3272    QCamera3HardwareInterface *hw =
3273        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3274    if (!hw) {
3275        ALOGE("%s: NULL camera device", __func__);
3276        return -EINVAL;
3277    }
3278
3279    int rc = hw->processCaptureRequest(request);
3280    ALOGV("%s: X", __func__);
3281    return rc;
3282}
3283
3284/*===========================================================================
3285 * FUNCTION   : get_metadata_vendor_tag_ops
3286 *
3287 * DESCRIPTION: Get the query operations for vendor-specific metadata tags
3288 *
3289 * PARAMETERS :
3290 *   @device : camera3 device structure
3291 *   @ops    : vendor tag query ops structure filled in by the HAL
3292 * RETURN     : None
3293 *==========================================================================*/
3294
3295void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3296                const struct camera3_device *device,
3297                vendor_tag_query_ops_t* ops)
3298{
3299    ALOGV("%s: E", __func__);
3300    QCamera3HardwareInterface *hw =
3301        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3302    if (!hw) {
3303        ALOGE("%s: NULL camera device", __func__);
3304        return;
3305    }
3306
3307    hw->getMetadataVendorTagOps(ops);
3308    ALOGV("%s: X", __func__);
3309    return;
3310}
3311
3312/*===========================================================================
3313 * FUNCTION   : dump
3314 *
3315 * DESCRIPTION: Dump the camera device state to the given file descriptor
3316 *
3317 * PARAMETERS :
3318 *   @device : camera3 device structure
3319 *   @fd     : file descriptor to write the dump to
3320 * RETURN     : None
3321 *==========================================================================*/
3322
3323void QCamera3HardwareInterface::dump(
3324                const struct camera3_device *device, int fd)
3325{
3326    ALOGV("%s: E", __func__);
3327    QCamera3HardwareInterface *hw =
3328        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3329    if (!hw) {
3330        ALOGE("%s: NULL camera device", __func__);
3331        return;
3332    }
3333
3334    hw->dump(fd);
3335    ALOGV("%s: X", __func__);
3336    return;
3337}
3338
3339/*===========================================================================
3340 * FUNCTION   : close_camera_device
3341 *
3342 * DESCRIPTION: Close the camera device and mark the session as inactive
3343 *
3344 * PARAMETERS :
3345 *   @device : hardware device handle of the camera to be closed
3346 *
3347 * RETURN     : NO_ERROR on success, BAD_VALUE if the device is NULL
3348 *==========================================================================*/
3349int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3350{
3351    ALOGV("%s: E", __func__);
3352    int ret = NO_ERROR;
3353    QCamera3HardwareInterface *hw =
3354        reinterpret_cast<QCamera3HardwareInterface *>(
3355            reinterpret_cast<camera3_device_t *>(device)->priv);
3356    if (!hw) {
3357        ALOGE("NULL camera device");
3358        return BAD_VALUE;
3359    }
3360    delete hw;
3361
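    /* Mark the single camera session as inactive so a subsequent open call
       can succeed. */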
3362    pthread_mutex_lock(&mCameraSessionLock);
3363    mCameraSessionActive = 0;
3364    pthread_mutex_unlock(&mCameraSessionLock);
3365    ALOGV("%s: X", __func__);
3366    return ret;
3367}
3368
3369/*===========================================================================
3370 * FUNCTION   : getWaveletDenoiseProcessPlate
3371 *
3372 * DESCRIPTION: query wavelet denoise process plate
3373 *
3374 * PARAMETERS : None
3375 *
3376 * RETURN     : WNR process plate value
3377 *==========================================================================*/
3378cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3379{
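    /* persist.denoise.process.plates: 0 = YCbCr plane, 1 = CbCr only,
       2 = streamlined YCbCr, 3 = streamlined CbCr; any other value falls
       back to streamlined YCbCr. */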
3380    char prop[PROPERTY_VALUE_MAX];
3381    memset(prop, 0, sizeof(prop));
3382    property_get("persist.denoise.process.plates", prop, "0");
3383    int processPlate = atoi(prop);
3384    switch(processPlate) {
3385    case 0:
3386        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3387    case 1:
3388        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3389    case 2:
3390        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3391    case 3:
3392        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3393    default:
3394        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3395    }
3396}
3397
3398/*===========================================================================
3399 * FUNCTION   : needRotationReprocess
3400 *
3401 * DESCRIPTION: check whether rotation needs to be handled by reprocess in pp
3402 *
3403 * PARAMETERS : none
3404 *
3405 * RETURN     : true: needed
3406 *              false: no need
3407 *==========================================================================*/
3408bool QCamera3HardwareInterface::needRotationReprocess()
3409{
3410    // TODO: hack here to return false to avoid reprocess
3411    // Need to be enabled after PP is enabled; the logic below is unreachable
3411    // until this early return is removed.
3412    return false;
3413
3414    if (!mJpegSettings->is_jpeg_format) {
3415        // RAW image, no need to reprocess
3416        return false;
3417    }
3418
3419    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3420        mJpegSettings->jpeg_orientation > 0) {
3421        // current rotation is not zero, and pp has the capability to process rotation
3422        ALOGD("%s: need do reprocess for rotation", __func__);
3423        return true;
3424    }
3425
3426    return false;
3427}
3428
3429/*===========================================================================
3430 * FUNCTION   : addOnlineReprocChannel
3431 *
3432 * DESCRIPTION: add an online reprocess channel that will reprocess frames
3433 *              coming from the input channel
3434 *
3435 * PARAMETERS :
3436 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3437 *
3438 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3439 *==========================================================================*/
3440QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3441                                                      QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3442{
3443    int32_t rc = NO_ERROR;
3444    QCamera3ReprocessChannel *pChannel = NULL;
3445    if (pInputChannel == NULL) {
3446        ALOGE("%s: input channel obj is NULL", __func__);
3447        return NULL;
3448    }
3449
3450    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3451            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
3452    if (NULL == pChannel) {
3453        ALOGE("%s: no mem for reprocess channel", __func__);
3454        return NULL;
3455    }
3456
3457    // Capture channel, only need snapshot and postview streams start together
3458    // Reprocess channel attributes: continuous super-buffer notification mode
3459    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
3460    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
3461    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
3462    rc = pChannel->initialize();
3463    if (rc != NO_ERROR) {
3464        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
3465        delete pChannel;
3466        return NULL;
3467    }
3468
3469    // pp feature config
3470    cam_pp_feature_config_t pp_config;
3471    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
3472    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
3473        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
3474        pp_config.sharpness = 10;
3475    }
3476
3477    if (isWNREnabled()) {
3478        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
3479        pp_config.denoise2d.denoise_enable = 1;
3480        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
3481    }
3482    if (needRotationReprocess()) {
3483        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
3484        int rotation = mJpegSettings->jpeg_orientation;
3485        if (rotation == 0) {
3486            pp_config.rotation = ROTATE_0;
3487        } else if (rotation == 90) {
3488            pp_config.rotation = ROTATE_90;
3489        } else if (rotation == 180) {
3490            pp_config.rotation = ROTATE_180;
3491        } else if (rotation == 270) {
3492            pp_config.rotation = ROTATE_270;
3493        }
3494    }
3495
3496    rc = pChannel->addReprocStreamsFromSource(pp_config,
3497                                              pInputChannel,
3498                                              mMetadataChannel);
3499
3500    if (rc != NO_ERROR) {
3501        delete pChannel;
3502        return NULL;
3503    }
3504    return pChannel;
3505}
3506
3507/*===========================================================================
3508 * FUNCTION   : needReprocess
3509 *
3510 * DESCRIPTION: check whether reprocess is needed
3511 *
3512 * PARAMETERS : none
3513 *
3514 * RETURN     : true: needed
3515 *              false: no need
3516 *==========================================================================*/
3517bool QCamera3HardwareInterface::needReprocess()
3518{
3519    // TODO: hack here to return false to avoid reprocess
3520    // Need to be enabled after PP is enabled; the logic below is unreachable
3520    // until this early return is removed.
3521    return false;
3522
3523    if (!mJpegSettings->is_jpeg_format) {
3524        // RAW image, no need to reprocess
3525        return false;
3526    }
3527
3528    if (((gCamCapability[mCameraId]->min_required_pp_mask > 0) ||
3529         isWNREnabled())) {
3530        // TODO: add for ZSL HDR later
3531        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3532        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3533        return true;
3534    }
3535
3536    return needRotationReprocess();
3537}
3538
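/*===========================================================================
 * FUNCTION   : getMaxUnmatchedFramesInQueue
 *
 * DESCRIPTION: return the maximum number of unmatched frames allowed in the
 *              channel queue, taken from the sensor capability
 *
 * PARAMETERS : None
 *
 * RETURN     : minimum number of post-processing buffers for this camera
 *==========================================================================*/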
3539int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
3540{
3541    return gCamCapability[mCameraId]->min_num_pp_bufs;
3542}
3543
3544}; //end namespace qcamera
3545