QCamera3HWI.cpp revision 152ee3049941f9d51276932f0b507b800723b1de
/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"

#include <cutils/properties.h>
#include <hardware/camera3.h>
#include <camera/CameraMetadata.h>
#include <stdlib.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <ui/Fence.h>
#include <gralloc_priv.h>
#include "QCamera3HWI.h"
#include "QCamera3Mem.h"
#include "QCamera3Channel.h"
#include "QCamera3PostProc.h"

using namespace android;

namespace qcamera {
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
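// Globals shared across camera sessions: per-sensor capability tables and
// cached static metadata, indexed by camera ID.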
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

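/* AE modes map to flash behavior: both AE OFF and plain AE ON leave the flash
 * off, while the auto-flash and red-eye variants fall back to auto flash. */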
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON   },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
};

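/* Supported JPEG thumbnail dimensions, flattened as (width, height) pairs;
 * the trailing (0, 0) entry indicates that thumbnail generation can be
 * disabled. */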
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};

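/* camera3_device_ops dispatch table: routes framework calls to the static
 * wrappers implemented by QCamera3HardwareInterface. */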
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};


/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      m_pPowerModule(NULL)
{
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
            channel->stop();
    }
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
            delete channel;
        free (*it);
    }

    if (mJpegSettings != NULL) {
        free(mJpegSettings);
        mJpegSettings = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        mMetadataChannel->stop();
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    pthread_mutex_lock(&mCameraSessionLock);
    if (mCameraSessionActive) {
        ALOGE("%s: multiple simultaneous camera instances not supported", __func__);
        pthread_mutex_unlock(&mCameraSessionLock);
        return INVALID_OPERATION;
    }

    if (mCameraOpened) {
        *hw_device = NULL;
        pthread_mutex_unlock(&mCameraSessionLock);
        return PERMISSION_DENIED;
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
        mCameraSessionActive = 1;
    } else
        *hw_device = NULL;

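    // When built with HAS_MULTIMEDIA_HINTS, send the power HAL the
    // video-encode hint for the duration of the session; closeCamera()
    // sends the matching "state=0" hint.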
#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == 0) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=1");
            }
        }
    }
#endif
    pthread_mutex_unlock(&mCameraSessionLock);
    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }
    mCameraHandle = camera_open(mCameraId);
    if (!mCameraHandle) {
        ALOGE("camera_open failed.");
        return UNKNOWN_ERROR;
    }

    mCameraOpened = true;

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    int rc = NO_ERROR;

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;
    mCameraOpened = false;

#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == NO_ERROR) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=0");
            }
        }
    }
#endif

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParameters failed %d", __func__, rc);
        goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    return 0;

err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
 *              and output streams.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    pthread_mutex_lock(&mMutex);
    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    /* first invalidate all the streams in mStreamInfo;
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }

    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream already exists in mStreamInfo, validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    //mMetadataChannel->stop();

    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        if (newStream->priv == NULL) {
            //New stream, construct channel
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    channel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = channel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework won't*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

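        // Track how many buffers are outstanding per stream; the count is
        // reset on every (re)configure.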
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestsList and mPendingBuffersMap */
    mPendingRequestsList.clear();

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mFirstRequest = true;

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateCaptureRequest
 *
 * DESCRIPTION: validate a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateCaptureRequest(
                    camera3_capture_request_t *request)
{
    ssize_t idx = 0;
    const camera3_stream_buffer_t *b;
    CameraMetadata meta;

    /* Sanity check the request */
    if (request == NULL) {
        ALOGE("%s: NULL capture request", __func__);
        return BAD_VALUE;
    }

    uint32_t frameNumber = request->frame_number;
    if (request->input_buffer != NULL &&
            request->input_buffer->stream != mInputStream) {
        ALOGE("%s: Request %d: Input buffer not from input stream!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
        ALOGE("%s: Request %d: No output buffers provided!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->input_buffer != NULL) {
        b = request->input_buffer;
        QCamera3Channel *channel =
            static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
    }

    // Validate all buffers
    b = request->output_buffers;
    do {
        QCamera3Channel *channel =
                static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        idx++;
        b = request->output_buffers + idx;
    } while (idx < (ssize_t)request->num_output_buffers);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : registerStreamBuffers
 *
 * DESCRIPTION: Register buffers for a given stream with the HAL device.
 *
 * PARAMETERS :
 *   @buffer_set : buffers to be registered for a given stream
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t *buffer_set)
{
    int rc = 0;

    pthread_mutex_lock(&mMutex);

    if (buffer_set == NULL) {
        ALOGE("%s: Invalid buffer_set parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->stream == NULL) {
        ALOGE("%s: Invalid stream parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->num_buffers < 1) {
        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->buffers == NULL) {
        ALOGE("%s: Invalid buffers parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    camera3_stream_t *stream = buffer_set->stream;
    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;

    //set the buffer_set in the mStreamInfo array
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->stream == stream) {
            uint32_t numBuffers = buffer_set->num_buffers;
            (*it)->buffer_set.stream = buffer_set->stream;
            (*it)->buffer_set.num_buffers = numBuffers;
            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
            if ((*it)->buffer_set.buffers == NULL) {
                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
                pthread_mutex_unlock(&mMutex);
                return -ENOMEM;
            }
            for (size_t j = 0; j < numBuffers; j++){
                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
            }
            (*it)->registered = 1;
        }
    }
    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
    if (rc < 0) {
        ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    pthread_mutex_unlock(&mMutex);
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : processCaptureRequest
 *
 * DESCRIPTION: process a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::processCaptureRequest(
                    camera3_capture_request_t *request)
{
    int rc = NO_ERROR;
    int32_t request_id;
    CameraMetadata meta;

    pthread_mutex_lock(&mMutex);

    rc = validateCaptureRequest(request);
    if (rc != NO_ERROR) {
        ALOGE("%s: incoming request is not valid", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    uint32_t frameNumber = request->frame_number;
    rc = setFrameParameters(request->frame_number, request->settings);
    if (rc < 0) {
        ALOGE("%s: fail to set frame parameters", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    meta = request->settings;
    if (meta.exists(ANDROID_REQUEST_ID)) {
        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
        mCurrentRequestId = request_id;
        ALOGV("%s: Received request with id: %d",__func__, request_id);
    } else if (mFirstRequest || mCurrentRequestId == -1){
        ALOGE("%s: Unable to find request id field, \
                & no previous id available", __func__);
        pthread_mutex_unlock(&mMutex);
        return NAME_NOT_FOUND;
    } else {
        ALOGV("%s: Re-using old request id", __func__);
        request_id = mCurrentRequestId;
    }

    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
                                    __func__, __LINE__,
                                    request->num_output_buffers,
                                    request->input_buffer,
                                    frameNumber);
    // Acquire all request buffers first
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        sp<Fence> acquireFence = new Fence(output.acquire_fence);

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            //Call function to store local copy of jpeg data for encode params.
            rc = getJpegSettings(request->settings);
            if (rc < 0) {
                ALOGE("%s: failed to get jpeg parameters", __func__);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }

        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
        if (rc != OK) {
            ALOGE("%s: fence wait failed %d", __func__, rc);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
    }

    /* Update pending request list and pending buffers map */
    PendingRequestInfo pendingRequest;
    pendingRequest.frame_number = frameNumber;
    pendingRequest.num_buffers = request->num_output_buffers;
    pendingRequest.request_id = request_id;

    for (size_t i = 0; i < request->num_output_buffers; i++) {
        RequestedBufferInfo requestedBuf;
        requestedBuf.stream = request->output_buffers[i].stream;
        requestedBuf.buffer = NULL;
        pendingRequest.buffers.push_back(requestedBuf);

        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
    }
    mPendingRequestsList.push_back(pendingRequest);

    // Notify metadata channel we receive a request
    mMetadataChannel->request(NULL, frameNumber);

    // Call request on other streams
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
        mm_camera_buf_def_t *pInputBuffer = NULL;

        if (channel == NULL) {
            ALOGE("%s: invalid channel pointer for stream", __func__);
            continue;
        }

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            QCamera3RegularChannel* inputChannel = NULL;
            if(request->input_buffer != NULL){

                //Try to get the internal format
                inputChannel = (QCamera3RegularChannel*)
                    request->input_buffer->stream->priv;
                if(inputChannel == NULL ){
                    ALOGE("%s: failed to get input channel handle", __func__);
                } else {
                    pInputBuffer =
                        inputChannel->getInternalFormatBuffer(
                                request->input_buffer->buffer);
                    ALOGD("%s: Input buffer dump",__func__);
                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
                    ALOGD("frame len:%d", pInputBuffer->frame_len);
                }
            }
            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
                            pInputBuffer,(QCamera3Channel*)inputChannel);
        } else {
            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
                __LINE__, output.buffer, frameNumber);
            rc = channel->request(output.buffer, frameNumber);
        }
        if (rc < 0)
            ALOGE("%s: request failed", __func__);
    }

    mFirstRequest = false;

    //Block on conditional variable
    mPendingRequest = 1;
    while (mPendingRequest == 1) {
        pthread_cond_wait(&mRequestCond, &mMutex);
    }

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : getMetadataVendorTagOps
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::getMetadataVendorTagOps(
                    vendor_tag_query_ops_t* /*ops*/)
{
    /* Enable locks when we eventually add Vendor Tags */
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::dump(int /*fd*/)
{
    /*Enable lock when we implement this function*/
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : captureResultCb
 *
 * DESCRIPTION: Callback handler for all capture result
 *              (streams, as well as metadata)
 *
 * PARAMETERS :
 *   @metadata : metadata information
 *   @buffer   : actual gralloc buffer to be returned to frameworks.
 *               NULL if metadata.
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    pthread_mutex_lock(&mMutex);

    if (metadata_buf) {
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
            CAM_INTF_META_PENDING_REQUESTS, metadata);
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;

        if (!frame_number_valid) {
            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
            mMetadataChannel->bufDone(metadata_buf);
            goto done_metadata;
        }
        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
                frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is a metadata drop, how do we derive the timestamp
            //of the dropped frames? For now, approximate it by subtracting one
            //frame interval (33ms) per missing frame from the reported timestamp.
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time, i->request_id);
                // Return metadata buffer
                mMetadataChannel->bufDone(metadata_buf);
            }
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                if (!result_buffers) {
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }


done_metadata:
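        // Unblock processCaptureRequest() only when no stream has all of its
        // max_buffers outstanding and the backend reports no pending requests.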
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued && !pending_requests) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
            i++;
        }
        if (i == mPendingRequestsList.end()) {
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            ALOGV("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                                __func__, buffer, frame_number);
                    }
                }
            }
        }
    }

    pthread_mutex_unlock(&mMutex);
    return;
}

/*===========================================================================
 * FUNCTION   : translateCbMetadataToResultMetadata
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo = (cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
    if(mIsZslMode) {
        uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
    } else {
        uint8_t *ae_state =
            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
        camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
    }
    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE, lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY, filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE, focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE, focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    cam_crop_region_t  *hScalerCropRegion = (cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME, sensorExpTime, 1);

1356    int64_t  *sensorFrameDuration =
1357        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1358    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
1359
1360    int32_t  *sensorSensitivity =
1361        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1362    mMetadataResponse.iso_speed = *sensorSensitivity;
1363    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1364
1365    uint8_t  *shadingMode =
1366        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1367    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1368
1369    uint8_t  *faceDetectMode =
1370        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1371    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
1372
1373    uint8_t  *histogramMode =
1374        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1375    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1376
1377    uint8_t  *sharpnessMapMode =
1378        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1379    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1380            sharpnessMapMode, 1);
1381
1382    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1383    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1384        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1385    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1386            (int32_t*)sharpnessMap->sharpness,
1387            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1388
1389    resultMetadata = camMetadata.release();
1390    return resultMetadata;
1391}
1392
1393/*===========================================================================
1394 * FUNCTION   : convertToRegions
1395 *
1396 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1397 *
1398 * PARAMETERS :
1399 *   @rect   : cam_rect_t struct to convert
1400 *   @region : int32_t destination array
1401 *   @weight : if we are converting from cam_area_t, weight is valid
1402 *             else weight = -1
1403 *
1404 *==========================================================================*/
1405void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1406    region[0] = rect.left;
1407    region[1] = rect.top;
1408    region[2] = rect.left + rect.width;
1409    region[3] = rect.top + rect.height;
1410    if (weight > -1) {
1411        region[4] = weight;
1412    }
1413}
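/* Illustrative example (not from the original source): a cam_rect_t of
 * {left=40, top=20, width=100, height=60} with weight 1 is reported to the
 * framework as {40, 20, 140, 80, 1}, i.e. {xmin, ymin, xmax, ymax, weight}. */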
1414
1415/*===========================================================================
1416 * FUNCTION   : convertFromRegions
1417 *
1418 * DESCRIPTION: helper method to convert a framework region array
1419 *              ({xmin, ymin, xmax, ymax, weight}) into a cam_area_t
1420 *
1421 * PARAMETERS :
1422 *   @roi      : cam_area_t destination to fill
1423 *   @settings : frame settings from the framework
1424 *   @tag      : metadata tag of the region entry to read
1425 *
1426 *==========================================================================*/
1427void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1428                                                   const camera_metadata_t *settings,
1429                                                   uint32_t tag){
1430    CameraMetadata frame_settings;
1431    frame_settings = settings;
1432    int32_t x_min = frame_settings.find(tag).data.i32[0];
1433    int32_t y_min = frame_settings.find(tag).data.i32[1];
1434    int32_t x_max = frame_settings.find(tag).data.i32[2];
1435    int32_t y_max = frame_settings.find(tag).data.i32[3];
1436    roi->weight = frame_settings.find(tag).data.i32[4];
1437    roi->rect.left = x_min;
1438    roi->rect.top = y_min;
1439    roi->rect.width = x_max - x_min;
1440    roi->rect.height = y_max - y_min;
1441}
1442
1443/*===========================================================================
1444 * FUNCTION   : resetIfNeededROI
1445 *
1446 * DESCRIPTION: helper method to clamp the roi to the scaler crop region;
1447 *              returns false if the roi does not overlap the crop region
1448 *
1449 * PARAMETERS :
1450 *   @roi       : cam_area_t struct to resize
1451 *   @scalerCropRegion : cam_crop_region_t region to compare against
1452 *
1453 *
1454 *==========================================================================*/
1455bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1456                                                 const cam_crop_region_t* scalerCropRegion)
1457{
1458    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1459    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1460    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
1461    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
1462    if ((roi_x_max < scalerCropRegion->left) ||
1463        (roi_y_max < scalerCropRegion->top)  ||
1464        (roi->rect.left > crop_x_max) ||
1465        (roi->rect.top > crop_y_max)){
1466        return false;
1467    }
1468    if (roi->rect.left < scalerCropRegion->left) {
1469        roi->rect.left = scalerCropRegion->left;
1470    }
1471    if (roi->rect.top < scalerCropRegion->top) {
1472        roi->rect.top = scalerCropRegion->top;
1473    }
1474    if (roi_x_max > crop_x_max) {
1475        roi_x_max = crop_x_max;
1476    }
1477    if (roi_y_max > crop_y_max) {
1478        roi_y_max = crop_y_max;
1479    }
1480    roi->rect.width = roi_x_max - roi->rect.left;
1481    roi->rect.height = roi_y_max - roi->rect.top;
1482    return true;
1483}
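/* Illustrative example (not from the original source): with a scaler crop
 * region of {left=100, top=100, width=800, height=600}, an ROI of
 * {left=50, top=50, width=200, height=200} is clamped to
 * {left=100, top=100, width=150, height=150}; an ROI with no overlap at all
 * is left untouched and false is returned. */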
1484
1485/*===========================================================================
1486 * FUNCTION   : convertLandmarks
1487 *
1488 * DESCRIPTION: helper method to extract the landmarks from face detection info
1489 *
1490 * PARAMETERS :
1491 *   @face   : cam_face_detection_info_t to read the landmarks from
1492 *   @landmarks : int32_t destination array
1493 *
1494 *
1495 *==========================================================================*/
1496void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1497{
1498    landmarks[0] = face.left_eye_center.x;
1499    landmarks[1] = face.left_eye_center.y;
1500    landmarks[2] = face.right_eye_center.x;
1501    landmarks[3] = face.right_eye_center.y;
1502    landmarks[4] = face.mouth_center.x;
1503    landmarks[5] = face.mouth_center.y;
1504}
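/* Note: the landmarks array is ordered {left eye x, left eye y, right eye x,
 * right eye y, mouth x, mouth y}, i.e. the six int32_t values published per
 * detected face. */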
1505
1506#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1507/*===========================================================================
1508 * FUNCTION   : initCapabilities
1509 *
1510 * DESCRIPTION: initialize camera capabilities in static data struct
1511 *
1512 * PARAMETERS :
1513 *   @cameraId  : camera Id
1514 *
1515 * RETURN     : int32_t type of status
1516 *              NO_ERROR  -- success
1517 *              non-zero failure code
1518 *==========================================================================*/
1519int QCamera3HardwareInterface::initCapabilities(int cameraId)
1520{
1521    int rc = 0;
1522    mm_camera_vtbl_t *cameraHandle = NULL;
1523    QCamera3HeapMemory *capabilityHeap = NULL;
1524
1525    cameraHandle = camera_open(cameraId);
1526    if (!cameraHandle) {
1527        ALOGE("%s: camera_open failed", __func__);
1528        rc = -1;
1529        goto open_failed;
1530    }
1531
1532    capabilityHeap = new QCamera3HeapMemory();
1533    if (capabilityHeap == NULL) {
1534        ALOGE("%s: creation of capabilityHeap failed", __func__);
1535        goto heap_creation_failed;
1536    }
1537    /* Allocate memory for capability buffer */
1538    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1539    if(rc != OK) {
1540        ALOGE("%s: No memory for capability", __func__);
1541        goto allocate_failed;
1542    }
1543
1544    /* Map memory for capability buffer */
1545    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1546    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1547                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1548                                capabilityHeap->getFd(0),
1549                                sizeof(cam_capability_t));
1550    if(rc < 0) {
1551        ALOGE("%s: failed to map capability buffer", __func__);
1552        goto map_failed;
1553    }
1554
1555    /* Query Capability */
1556    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1557    if(rc < 0) {
1558        ALOGE("%s: failed to query capability",__func__);
1559        goto query_failed;
1560    }
1561    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1562    if (!gCamCapability[cameraId]) {
1563        ALOGE("%s: out of memory", __func__);
1564        goto query_failed;
1565    }
1566    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1567                                        sizeof(cam_capability_t));
1568    rc = 0;
1569
1570query_failed:
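    /* Cleanup ladder: control falls through the labels below in reverse order
     * of setup, so both the success path and every failure path unmap the
     * capability buffer, free the heap and close the camera before returning rc. */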
1571    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1572                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1573map_failed:
1574    capabilityHeap->deallocate();
1575allocate_failed:
1576    delete capabilityHeap;
1577heap_creation_failed:
1578    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1579    cameraHandle = NULL;
1580open_failed:
1581    return rc;
1582}
1583
1584/*===========================================================================
1585 * FUNCTION   : initParameters
1586 *
1587 * DESCRIPTION: initialize camera parameters
1588 *
1589 * PARAMETERS :
1590 *
1591 * RETURN     : int32_t type of status
1592 *              NO_ERROR  -- success
1593 *              non-zero failure code
1594 *==========================================================================*/
1595int QCamera3HardwareInterface::initParameters()
1596{
1597    int rc = 0;
1598
1599    //Allocate Set Param Buffer
1600    mParamHeap = new QCamera3HeapMemory();
1601    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1602    if(rc != OK) {
1603        rc = NO_MEMORY;
1604        ALOGE("Failed to allocate SETPARM Heap memory");
1605        delete mParamHeap;
1606        mParamHeap = NULL;
1607        return rc;
1608    }
1609
1610    //Map memory for parameters buffer
1611    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1612            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1613            mParamHeap->getFd(0),
1614            sizeof(parm_buffer_t));
1615    if(rc < 0) {
1616        ALOGE("%s:failed to map SETPARM buffer",__func__);
1617        rc = FAILED_TRANSACTION;
1618        mParamHeap->deallocate();
1619        delete mParamHeap;
1620        mParamHeap = NULL;
1621        return rc;
1622    }
1623
1624    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1625    return rc;
1626}
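/* Note: mParameters aliases the heap buffer that was just mapped to the backend
 * (CAM_MAPPING_BUF_TYPE_PARM_BUF); setFrameParameters() batches entries into it
 * and hands the same buffer to the backend through set_parms(). */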
1627
1628/*===========================================================================
1629 * FUNCTION   : deinitParameters
1630 *
1631 * DESCRIPTION: de-initialize camera parameters
1632 *
1633 * PARAMETERS :
1634 *
1635 * RETURN     : NONE
1636 *==========================================================================*/
1637void QCamera3HardwareInterface::deinitParameters()
1638{
1639    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1640            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1641
1642    mParamHeap->deallocate();
1643    delete mParamHeap;
1644    mParamHeap = NULL;
1645
1646    mParameters = NULL;
1647}
1648
1649/*===========================================================================
1650 * FUNCTION   : calcMaxJpegSize
1651 *
1652 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1653 *
1654 * PARAMETERS :
1655 *
1656 * RETURN     : max_jpeg_size
1657 *==========================================================================*/
1658int QCamera3HardwareInterface::calcMaxJpegSize()
1659{
1660    int32_t max_jpeg_size = 0;
1661    int temp_width, temp_height;
1662    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1663        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1664        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1665        if (temp_width * temp_height > max_jpeg_size ) {
1666            max_jpeg_size = temp_width * temp_height;
1667        }
1668    }
1669    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1670    return max_jpeg_size;
1671}
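/* Illustrative note (not from the original source): the 3/2 factor budgets
 * 1.5 bytes per pixel of the largest picture size (roughly one YUV 4:2:0 frame)
 * plus the camera3_jpeg_blob_t trailer placed at the end of every JPEG buffer.
 * A hypothetical 4000x3000 entry would yield
 * 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t) bytes. */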
1672
1673/*===========================================================================
1674 * FUNCTION   : initStaticMetadata
1675 *
1676 * DESCRIPTION: initialize the static metadata
1677 *
1678 * PARAMETERS :
1679 *   @cameraId  : camera Id
1680 *
1681 * RETURN     : int32_t type of status
1682 *              0  -- success
1683 *              non-zero failure code
1684 *==========================================================================*/
1685int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1686{
1687    int rc = 0;
1688    CameraMetadata staticInfo;
1689
1690    /* android.info: hardware level */
1691    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
1692    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1693        &supportedHardwareLevel, 1);
1694
1695    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1696    /*HAL 3 only*/
1697    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1698                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1699
1700    /*hard coded for now but this should come from sensor*/
1701    float min_focus_distance;
1702    if(facingBack){
1703        min_focus_distance = 10;
1704    } else {
1705        min_focus_distance = 0;
1706    }
1707    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1708                    &min_focus_distance, 1);
1709
1710    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1711                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1712
1713    /*should be using focal lengths but sensor doesn't provide that info now*/
1714    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1715                      &gCamCapability[cameraId]->focal_length,
1716                      1);
1717
1718    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1719                      gCamCapability[cameraId]->apertures,
1720                      gCamCapability[cameraId]->apertures_count);
1721
1722    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1723                gCamCapability[cameraId]->filter_densities,
1724                gCamCapability[cameraId]->filter_densities_count);
1725
1726
1727    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1728                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1729                      gCamCapability[cameraId]->optical_stab_modes_count);
1730
1731    staticInfo.update(ANDROID_LENS_POSITION,
1732                      gCamCapability[cameraId]->lens_position,
1733                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1734
1735    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1736                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1737    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1738                      lens_shading_map_size,
1739                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1740
1741    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1742                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1743    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1744            geo_correction_map_size,
1745            sizeof(geo_correction_map_size)/sizeof(int32_t));
1746
1747    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1748                       gCamCapability[cameraId]->geo_correction_map,
1749                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1750
1751    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1752            gCamCapability[cameraId]->sensor_physical_size, 2);
1753
1754    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1755            gCamCapability[cameraId]->exposure_time_range, 2);
1756
1757    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1758            &gCamCapability[cameraId]->max_frame_duration, 1);
1759
1760
1761    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1762                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1763
1764    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1765                                               gCamCapability[cameraId]->pixel_array_size.height};
1766    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1767                      pixel_array_size, 2);
1768
1769    int32_t active_array_size[] = {0, 0,
1770                                                gCamCapability[cameraId]->active_array_size.width,
1771                                                gCamCapability[cameraId]->active_array_size.height};
1772    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1773                      active_array_size, 4);
1774
1775    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1776            &gCamCapability[cameraId]->white_level, 1);
1777
1778    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1779            gCamCapability[cameraId]->black_level_pattern, 4);
1780
1781    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1782                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1783
1784    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1785                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1786
1787    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1788                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1789    /*hardcode 0 for now*/
1790    int32_t max_face_count = 0;
1791    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1792                      &max_face_count, 1);
1793
1794    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1795                      &gCamCapability[cameraId]->histogram_size, 1);
1796
1797    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1798            &gCamCapability[cameraId]->max_histogram_count, 1);
1799
1800    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1801                                                gCamCapability[cameraId]->sharpness_map_size.height};
1802
1803    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1804            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1805
1806    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1807            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1808
1809
1810    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1811                      &gCamCapability[cameraId]->raw_min_duration,
1812                       1);
1813
1814    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
1815                                                HAL_PIXEL_FORMAT_BLOB};
1816    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
1817    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1818                      scalar_formats,
1819                      scalar_formats_count);
1820
1821    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1822    makeTable(gCamCapability[cameraId]->supported_sizes_tbl,
1823              gCamCapability[cameraId]->supported_sizes_tbl_cnt,
1824              available_processed_sizes);
1825    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1826                available_processed_sizes,
1827                (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2);
1828
1829    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1830    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1831                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1832                 available_fps_ranges);
1833    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1834            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1835
1836    camera_metadata_rational exposureCompensationStep = {
1837            gCamCapability[cameraId]->exp_compensation_step.numerator,
1838            gCamCapability[cameraId]->exp_compensation_step.denominator};
1839    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1840                      &exposureCompensationStep, 1);
1841
1842    /*TO DO*/
1843    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1844    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1845                      availableVstabModes, sizeof(availableVstabModes));
1846
1847    /*HAL 1 and HAL 3 common*/
1848    float maxZoom = 4;
1849    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1850            &maxZoom, 1);
1851
1852    int32_t max3aRegions = 1;
1853    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1854            &max3aRegions, 1);
1855
1856    uint8_t availableFaceDetectModes[] = {
1857            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1858    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1859                      availableFaceDetectModes,
1860                      sizeof(availableFaceDetectModes));
1861
1862    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1863                                       gCamCapability[cameraId]->raw_dim.height};
1864    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1865                      raw_size,
1866                      sizeof(raw_size)/sizeof(int32_t));
1867
1868    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1869                                                        gCamCapability[cameraId]->exposure_compensation_max};
1870    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1871            exposureCompensationRange,
1872            sizeof(exposureCompensationRange)/sizeof(int32_t));
1873
1874    uint8_t lensFacing = (facingBack) ?
1875            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1876    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1877
1878    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
1879    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1880              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1881              available_jpeg_sizes);
1882    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1883                available_jpeg_sizes,
1884                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1885
1886    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1887                      available_thumbnail_sizes,
1888                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1889
1890    int32_t max_jpeg_size = 0;
1891    int temp_width, temp_height;
1892    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1893        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1894        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1895        if (temp_width * temp_height > max_jpeg_size ) {
1896            max_jpeg_size = temp_width * temp_height;
1897        }
1898    }
1899    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1900    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1901                      &max_jpeg_size, 1);
1902
1903    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1904    int32_t size = 0;
1905    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1906        int val = lookupFwkName(EFFECT_MODES_MAP,
1907                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1908                                   gCamCapability[cameraId]->supported_effects[i]);
1909        if (val != NAME_NOT_FOUND) {
1910            avail_effects[size] = (uint8_t)val;
1911            size++;
1912        }
1913    }
1914    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1915                      avail_effects,
1916                      size);
1917
1918    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1919    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1920    int32_t supported_scene_modes_cnt = 0;
1921    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1922        int val = lookupFwkName(SCENE_MODES_MAP,
1923                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1924                                gCamCapability[cameraId]->supported_scene_modes[i]);
1925        if (val != NAME_NOT_FOUND) {
1926            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1927            supported_indexes[supported_scene_modes_cnt] = i;
1928            supported_scene_modes_cnt++;
1929        }
1930    }
1931
1932    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1933                      avail_scene_modes,
1934                      supported_scene_modes_cnt);
1935
1936    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1937    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1938                      supported_scene_modes_cnt,
1939                      scene_mode_overrides,
1940                      supported_indexes,
1941                      cameraId);
1942    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
1943                      scene_mode_overrides,
1944                      supported_scene_modes_cnt*3);
1945
1946    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
1947    size = 0;
1948    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
1949        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
1950                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
1951                                 gCamCapability[cameraId]->supported_antibandings[i]);
1952        if (val != NAME_NOT_FOUND) {
1953            avail_antibanding_modes[size] = (uint8_t)val;
1954            size++;
1955        }
1956
1957    }
1958    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1959                      avail_antibanding_modes,
1960                      size);
1961
1962    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
1963    size = 0;
1964    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
1965        int val = lookupFwkName(FOCUS_MODES_MAP,
1966                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
1967                                gCamCapability[cameraId]->supported_focus_modes[i]);
1968        if (val != NAME_NOT_FOUND) {
1969            avail_af_modes[size] = (uint8_t)val;
1970            size++;
1971        }
1972    }
1973    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1974                      avail_af_modes,
1975                      size);
1976
1977    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
1978    size = 0;
1979    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
1980        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1981                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1982                                    gCamCapability[cameraId]->supported_white_balances[i]);
1983        if (val != NAME_NOT_FOUND) {
1984            avail_awb_modes[size] = (uint8_t)val;
1985            size++;
1986        }
1987    }
1988    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1989                      avail_awb_modes,
1990                      size);
1991
1992    uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX];
1993    size = 0;
1994    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) {
1995        int val = lookupFwkName(FLASH_MODES_MAP,
1996                                sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
1997                                gCamCapability[cameraId]->supported_flash_modes[i]);
1998        if (val != NAME_NOT_FOUND) {
1999            avail_flash_modes[size] = (uint8_t)val;
2000            size++;
2001        }
2002    }
2003    static uint8_t flashAvailable = 0;
2004    if (size > 1) {
2005        //flash is supported
2006        flashAvailable = 1;
2007    }
2008    staticInfo.update(ANDROID_FLASH_MODE,
2009                      avail_flash_modes,
2010                      size);
2011
2012    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2013            &flashAvailable, 1);
2014
2015    uint8_t avail_ae_modes[5];
2016    size = 0;
2017    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2018        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2019        size++;
2020    }
2021    if (flashAvailable) {
2022        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2023        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2024        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2025    }
2026    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2027                      avail_ae_modes,
2028                      size);
2029
2030    int32_t min = INT_MAX, max = INT_MIN;
2031    for (int i = 0; i < gCamCapability[cameraId]->supported_iso_modes_cnt; i++) {
2032        int32_t sensitivity = getSensorSensitivity(gCamCapability[cameraId]->supported_iso_modes[i]);
2033        if (sensitivity != -1) {
2034            min = (sensitivity >= min) ? min : sensitivity;
2035            max = (sensitivity <= max) ? max : sensitivity;
2036        }
2037    }
2038    int32_t sensitivity_range[] = {min, max};
2039    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2040                      sensitivity_range,
2041                      sizeof(sensitivity_range) / sizeof(int32_t));
2042
2043    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2044                      &gCamCapability[cameraId]->max_analog_sensitivity,
2045                      1);
2046    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2047                      &gCamCapability[cameraId]->processed_min_duration,
2048                      sizeof(int32_t));
2049    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2050                      &gCamCapability[cameraId]->jpeg_min_duration,
2051                      sizeof(int32_t));
2052
2053    gStaticMetadata[cameraId] = staticInfo.release();
2054    return rc;
2055}
2056
2057/*===========================================================================
2058 * FUNCTION   : makeTable
2059 *
2060 * DESCRIPTION: make a table of sizes
2061 *
2062 * PARAMETERS :
2063 *   @dimTable  : source table of cam_dimension_t entries, @size : entry count
2064 *   @sizeTable : flattened {width, height, ...} int32_t output array
2065 *==========================================================================*/
2066void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2067                                          int32_t* sizeTable)
2068{
2069    int j = 0;
2070    for (int i = 0; i < size; i++) {
2071        sizeTable[j] = dimTable[i].width;
2072        sizeTable[j+1] = dimTable[i].height;
2073        j+=2;
2074    }
2075}
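/* Illustrative example (not from the original source): a dimTable of
 * {{1920, 1080}, {1280, 720}} with size 2 is flattened into
 * sizeTable = {1920, 1080, 1280, 720}. */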
2076
2077/*===========================================================================
2078 * FUNCTION   : makeFPSTable
2079 *
2080 * DESCRIPTION: make a table of fps ranges
2081 *
2082 * PARAMETERS : @fpsTable : cam_fps_range_t table, @size : entry count,
2083 *              @fpsRangesTable : flattened {min_fps, max_fps, ...} output
2084 *==========================================================================*/
2085void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2086                                          int32_t* fpsRangesTable)
2087{
2088    int j = 0;
2089    for (int i = 0; i < size; i++) {
2090        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2091        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2092        j+=2;
2093    }
2094}
2095
2096/*===========================================================================
2097 * FUNCTION   : makeOverridesList
2098 *
2099 * DESCRIPTION: make a list of scene mode overrides
2100 *
2101 * PARAMETERS :
2102 *   @overridesTable : per-scene-mode overrides from the daemon, @size : count,
2103 *   @overridesList : output, @supported_indexes : backend indexes, @camera_id : camera Id
2104 *==========================================================================*/
2105void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2106                                                  uint8_t size, uint8_t* overridesList,
2107                                                  uint8_t* supported_indexes,
2108                                                  int camera_id)
2109{
2110    /*daemon will give a list of overrides for all scene modes.
2111      However we should send the fwk only the overrides for the scene modes
2112      supported by the framework*/
2113    int j = 0, index = 0, supt = 0;
2114    uint8_t focus_override;
2115    for (int i = 0; i < size; i++) {
2116        supt = 0;
2117        index = supported_indexes[i];
2118        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
2119        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2120                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2121                                                    overridesTable[index].awb_mode);
2122        focus_override = (uint8_t)overridesTable[index].af_mode;
2123        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2124           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2125              supt = 1;
2126              break;
2127           }
2128        }
2129        if (supt) {
2130           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2131                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2132                                              focus_override);
2133        } else {
2134           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2135        }
2136        j+=3;
2137    }
2138}
2139
2140/*===========================================================================
2141 * FUNCTION   : getScalarFormat
2142 *
2143 * DESCRIPTION: convert the format to type recognized by framework
2144 *
2145 * PARAMETERS : format : the format from backend
2146 *
2147 * RETURN     : format recognized by framework
2148 *
2149 *==========================================================================*/
2150int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2151{
2152    int32_t halPixelFormat;
2153
2154    switch (format) {
2155    case CAM_FORMAT_YUV_420_NV12:
2156        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2157        break;
2158    case CAM_FORMAT_YUV_420_NV21:
2159        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2160        break;
2161    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2162        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2163        break;
2164    case CAM_FORMAT_YUV_420_YV12:
2165        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2166        break;
2167    case CAM_FORMAT_YUV_422_NV16:
2168    case CAM_FORMAT_YUV_422_NV61:
2169    default:
2170        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2171        break;
2172    }
2173    return halPixelFormat;
2174}
2175
2176/*===========================================================================
2177 * FUNCTION   : getSensorSensitivity
2178 *
2179 * DESCRIPTION: convert iso_mode to an integer value
2180 *
2181 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2182 *
2183 * RETURN     : sensitivity supported by sensor
2184 *
2185 *==========================================================================*/
2186int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2187{
2188    int32_t sensitivity;
2189
2190    switch (iso_mode) {
2191    case CAM_ISO_MODE_100:
2192        sensitivity = 100;
2193        break;
2194    case CAM_ISO_MODE_200:
2195        sensitivity = 200;
2196        break;
2197    case CAM_ISO_MODE_400:
2198        sensitivity = 400;
2199        break;
2200    case CAM_ISO_MODE_800:
2201        sensitivity = 800;
2202        break;
2203    case CAM_ISO_MODE_1600:
2204        sensitivity = 1600;
2205        break;
2206    default:
2207        sensitivity = -1;
2208        break;
2209    }
2210    return sensitivity;
2211}
2212
2213
2214/*===========================================================================
2215 * FUNCTION   : AddSetParmEntryToBatch
2216 *
2217 * DESCRIPTION: add set parameter entry into batch
2218 *
2219 * PARAMETERS :
2220 *   @p_table     : ptr to parameter buffer
2221 *   @paramType   : parameter type
2222 *   @paramLength : length of parameter value
2223 *   @paramValue  : ptr to parameter value
2224 *
2225 * RETURN     : int32_t type of status
2226 *              NO_ERROR  -- success
2227 *              non-zero failure code
2228 *==========================================================================*/
2229int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2230                                                          cam_intf_parm_type_t paramType,
2231                                                          uint32_t paramLength,
2232                                                          void *paramValue)
2233{
2234    int position = paramType;
2235    int current, next;
2236
2237    /*************************************************************************
2238    *                 Code to take care of linking next flags                *
2239    *************************************************************************/
2240    current = GET_FIRST_PARAM_ID(p_table);
2241    if (position == current){
2242        //DO NOTHING
2243    } else if (position < current){
2244        SET_NEXT_PARAM_ID(position, p_table, current);
2245        SET_FIRST_PARAM_ID(p_table, position);
2246    } else {
2247        /* Search for the position in the linked list where we need to slot in*/
2248        while (position > GET_NEXT_PARAM_ID(current, p_table))
2249            current = GET_NEXT_PARAM_ID(current, p_table);
2250
2251        /*If node already exists no need to alter linking*/
2252        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2253            next = GET_NEXT_PARAM_ID(current, p_table);
2254            SET_NEXT_PARAM_ID(current, p_table, position);
2255            SET_NEXT_PARAM_ID(position, p_table, next);
2256        }
2257    }
2258
2259    /*************************************************************************
2260    *                   Copy contents into entry                             *
2261    *************************************************************************/
2262
2263    if (paramLength > sizeof(parm_type_t)) {
2264        ALOGE("%s:Size of input larger than max entry size",__func__);
2265        return BAD_VALUE;
2266    }
2267    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2268    return NO_ERROR;
2269}
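/* Illustrative note (not from the original source): the batch is a sparse table
 * plus a sorted, singly linked list of the parameter IDs that are actually set.
 * Starting from a batch whose first_flagged_entry is CAM_INTF_PARM_MAX, adding
 * IDs 7, 3 and 9 (in that order) leaves first_flagged_entry = 3 with links
 * 3 -> 7 -> 9, so a consumer only walks the populated entries. */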
2270
2271/*===========================================================================
2272 * FUNCTION   : lookupFwkName
2273 *
2274 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
2275 *              make sure the parameter is correctly propagated
2276 *
2277 * PARAMETERS  :
2278 *   @arr      : map between the two enums
2279 *   @len      : len of the map
2280 *   @hal_name : name of the hal_parm to map
2281 *
2282 * RETURN     : int type of status
2283 *              fwk_name  -- success
2284 *              NAME_NOT_FOUND  -- failure
2285 *==========================================================================*/
2286int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2287                                             int len, int hal_name)
2288{
2289
2290    for (int i = 0; i < len; i++) {
2291        if (arr[i].hal_name == hal_name)
2292            return arr[i].fwk_name;
2293    }
2294
2295    /* Not being able to find a matching framework type is not necessarily
2296     * an error: it happens when mm-camera supports more attributes
2297     * than the framework does */
2298    ALOGD("%s: Cannot find matching framework type", __func__);
2299    return NAME_NOT_FOUND;
2300}
2301
2302/*===========================================================================
2303 * FUNCTION   : lookupHalName
2304 *
2305 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
2306 *              make sure the parameter is correctly propagated
2307 *
2308 * PARAMETERS  :
2309 *   @arr      : map between the two enums
2310 *   @len      : len of the map
2311 *   @fwk_name : fwk enum value to map to its hal counterpart
2312 *
2313 * RETURN     : int32_t type of status
2314 *              hal_name  -- success
2315 *              NAME_NOT_FOUND  -- failure
2316 *==========================================================================*/
2317int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2318                                             int len, int fwk_name)
2319{
2320    for (int i = 0; i < len; i++) {
2321       if (arr[i].fwk_name == fwk_name)
2322           return arr[i].hal_name;
2323    }
2324    ALOGE("%s: Cannot find matching hal type", __func__);
2325    return NAME_NOT_FOUND;
2326}
2327
2328/*===========================================================================
2329 * FUNCTION   : getCamInfo
2330 *
2331 * DESCRIPTION: query camera capabilities
2332 *
2333 * PARAMETERS :
2334 *   @cameraId  : camera Id
2335 *   @info      : camera info struct to be filled in with camera capabilities
2336 *
2337 * RETURN     : int32_t type of status
2338 *              NO_ERROR  -- success
2339 *              non-zero failure code
2340 *==========================================================================*/
2341int QCamera3HardwareInterface::getCamInfo(int cameraId,
2342                                    struct camera_info *info)
2343{
2344    int rc = 0;
2345
2346    if (NULL == gCamCapability[cameraId]) {
2347        rc = initCapabilities(cameraId);
2348        if (rc < 0) {
2349            //pthread_mutex_unlock(&g_camlock);
2350            return rc;
2351        }
2352    }
2353
2354    if (NULL == gStaticMetadata[cameraId]) {
2355        rc = initStaticMetadata(cameraId);
2356        if (rc < 0) {
2357            return rc;
2358        }
2359    }
2360
2361    switch(gCamCapability[cameraId]->position) {
2362    case CAM_POSITION_BACK:
2363        info->facing = CAMERA_FACING_BACK;
2364        break;
2365
2366    case CAM_POSITION_FRONT:
2367        info->facing = CAMERA_FACING_FRONT;
2368        break;
2369
2370    default:
2371        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2372        rc = -1;
2373        break;
2374    }
2375
2376
2377    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2378    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2379    info->static_camera_characteristics = gStaticMetadata[cameraId];
2380
2381    return rc;
2382}
2383
2384/*===========================================================================
2385 * FUNCTION   : translateCapabilityToMetadata
2386 *
2387 * DESCRIPTION: construct the default request settings (camera_metadata_t)
2388 *              for the given request template
2389 * PARAMETERS : @type : the request template type
2390 *
2391 *
2392 * RETURN     : success: camera_metadata_t*
2393 *              failure: NULL
2394 *
2395 *==========================================================================*/
2396camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2397{
2398    pthread_mutex_lock(&mMutex);
2399
2400    if (mDefaultMetadata[type] != NULL) {
2401        pthread_mutex_unlock(&mMutex);
2402        return mDefaultMetadata[type];
2403    }
2404    //first time we are handling this request
2405    //fill up the metadata structure using the wrapper class
2406    CameraMetadata settings;
2407    //translate from cam_capability_t to camera_metadata_tag_t
2408    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2409    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2410
2411    /*control*/
2412
2413    uint8_t controlIntent = 0;
2414    switch (type) {
2415      case CAMERA3_TEMPLATE_PREVIEW:
2416        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2417        break;
2418      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2419        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2420        break;
2421      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2422        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2423        break;
2424      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2425        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2426        break;
2427      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2428        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2429        break;
2430      default:
2431        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2432        break;
2433    }
2434    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2435
2436    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2437            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2438
2439    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2440    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2441
2442    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2443    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2444
2445    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2446    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2447
2448    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2449    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2450
2451    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2452    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2453
2454    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2455    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2456
2457    static uint8_t focusMode;
2458    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2459        ALOGE("%s: Setting focus mode to auto", __func__);
2460        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2461    } else {
2462        ALOGE("%s: Setting focus mode to off", __func__);
2463        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2464    }
2465    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2466
2467    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2468    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2469
2470    /*flash*/
2471    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2472    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2473
2474
2475    /* lens */
2476    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2477    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2478
2479    if (gCamCapability[mCameraId]->filter_densities_count) {
2480        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2481        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2482                        1);
2483    }
2484
2485    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2486    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2487
2488    mDefaultMetadata[type] = settings.release();
2489
2490    pthread_mutex_unlock(&mMutex);
2491    return mDefaultMetadata[type];
2492}
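/* Note: default request templates are built once per template type and cached
 * in mDefaultMetadata[]; later calls for the same type return the cached
 * camera_metadata_t under mMutex without rebuilding it. */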
2493
2494/*===========================================================================
2495 * FUNCTION   : setFrameParameters
2496 *
2497 * DESCRIPTION: set parameters per frame as requested in the metadata from
2498 *              framework
2499 *
2500 * PARAMETERS :
2501 *   @frame_id  : frame number of the current capture request
2502 *   @settings  : frame settings information from framework
2503 *
2504 * RETURN     : success: NO_ERROR
2505 *              failure: non-zero error code
2506 *==========================================================================*/
2507int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2508                                                  const camera_metadata_t *settings)
2509{
2510    /*translate from camera_metadata_t type to parm_type_t*/
2511    int rc = 0;
2512    if (settings == NULL && mFirstRequest) {
2513        /*settings cannot be null for the first request*/
2514        return BAD_VALUE;
2515    }
2516
2517    int32_t hal_version = CAM_HAL_V3;
2518
2519    memset(mParameters, 0, sizeof(parm_buffer_t));
2520    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2521    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2522                sizeof(hal_version), &hal_version);
2523
2524    /*we need to update the frame number in the parameters*/
2525    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2526                                sizeof(frame_id), &frame_id);
2527    if (rc < 0) {
2528        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2529        return BAD_VALUE;
2530    }
2531
2532    if(settings != NULL){
2533        rc = translateMetadataToParameters(settings);
2534    }
2535    /*set the parameters to backend*/
2536    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2537    return rc;
2538}
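/* Note: per-frame flow: the batch is reset, the HAL version and frame number
 * are always added, the framework settings (when provided) are translated into
 * backend parameters, and the whole batch is pushed with one set_parms() call. */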
2539
2540/*===========================================================================
2541 * FUNCTION   : translateMetadataToParameters
2542 *
2543 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2544 *
2545 *
2546 * PARAMETERS :
2547 *   @settings  : frame settings information from framework
2548 *
2549 *
2550 * RETURN     : success: NO_ERROR
2551 *              failure: non-zero error code
2552 *==========================================================================*/
2553int QCamera3HardwareInterface::translateMetadataToParameters
2554                                  (const camera_metadata_t *settings)
2555{
2556    int rc = 0;
2557    CameraMetadata frame_settings;
2558    frame_settings = settings;
2559
2560
2561    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2562        int32_t antibandingMode =
2563            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
2564        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2565                sizeof(antibandingMode), &antibandingMode);
2566    }
2567
2568    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2569        int32_t expCompensation = frame_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2570        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2571          sizeof(expCompensation), &expCompensation);
2572    }
2573
2574    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2575        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2576        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2577                sizeof(aeLock), &aeLock);
2578    }
2579    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2580        cam_fps_range_t fps_range;
2581        fps_range.min_fps =
2582            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2583        fps_range.max_fps =
2584            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2585        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2586                sizeof(fps_range), &fps_range);
2587    }
2588
2589    float focalDistance = -1.0;
2590    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2591        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2592        rc = AddSetParmEntryToBatch(mParameters,
2593                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2594                sizeof(focalDistance), &focalDistance);
2595    }
2596
2597    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2598        uint8_t fwk_focusMode =
2599            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2600        uint8_t focusMode;
2601        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2602            focusMode = CAM_FOCUS_MODE_INFINITY;
2603        } else {
2604            focusMode = lookupHalName(FOCUS_MODES_MAP,
2605                                      sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2606                                      fwk_focusMode);
2607        }
2608        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2609                sizeof(focusMode), &focusMode);
2610    }
2611
2612    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2613        uint8_t awbLock =
2614            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2615        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2616                sizeof(awbLock), &awbLock);
2617    }
2618
2619    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2620        uint8_t fwk_whiteLevel =
2621            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2622        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2623                sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2624                fwk_whiteLevel);
2625        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2626                sizeof(whiteLevel), &whiteLevel);
2627    }
2628
2629    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2630        uint8_t fwk_effectMode =
2631            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2632        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2633                sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2634                fwk_effectMode);
2635        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2636                sizeof(effectMode), &effectMode);
2637    }
2638
2639    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2640        uint8_t fwk_aeMode =
2641            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2642        uint8_t aeMode;
2643        int32_t redeye;
2644        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2645            aeMode = CAM_AE_MODE_OFF;
2646        } else {
2647            aeMode = CAM_AE_MODE_ON;
2648        }
2649        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2650            redeye = 1;
2651        } else {
2652            redeye = 0;
2653        }
2654        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2655                                          sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
2656                                          fwk_aeMode);
2657        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2658                sizeof(aeMode), &aeMode);
2659        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2660                sizeof(flashMode), &flashMode);
2661        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2662                sizeof(redeye), &redeye);
2663    }
2664
2665    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2666        uint8_t colorCorrectMode =
2667            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2668        rc =
2669            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2670                    sizeof(colorCorrectMode), &colorCorrectMode);
2671    }
2672    cam_trigger_t aecTrigger;
2673    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2674    aecTrigger.trigger_id = -1;
2675    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2676        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2677        aecTrigger.trigger =
2678            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2679        aecTrigger.trigger_id =
2680            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2681    }
2682    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2683                                sizeof(aecTrigger), &aecTrigger);
2684
2685    /*af_trigger must come with a trigger id*/
2686    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2687        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2688        cam_trigger_t af_trigger;
2689        af_trigger.trigger =
2690            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2691        af_trigger.trigger_id =
2692            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2693        rc = AddSetParmEntryToBatch(mParameters,
2694                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2695    }
2696
2697    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2698        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2699        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2700                sizeof(metaMode), &metaMode);
2701        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2702           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2703           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2704                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2705                                             fwk_sceneMode);
2706           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2707                sizeof(sceneMode), &sceneMode);
2708        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
2709           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2710           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2711                sizeof(sceneMode), &sceneMode);
2712        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
2713           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2714           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2715                sizeof(sceneMode), &sceneMode);
2716        }
2717    }
2718
2719    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2720        int32_t demosaic =
2721            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2722        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2723                sizeof(demosaic), &demosaic);
2724    }
2725
2726    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2727        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2728        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE,
2729                sizeof(edgeMode), &edgeMode);
2730    }
2731
2732    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2733        int32_t edgeStrength =
2734            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2735        rc = AddSetParmEntryToBatch(mParameters,
2736                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2737    }
2738
2739    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2740        uint8_t flashMode =
2741            frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2742        rc = AddSetParmEntryToBatch(mParameters,
2743                CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode);
2744    }
2745
2746    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2747        uint8_t flashPower =
2748            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2749        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2750                sizeof(flashPower), &flashPower);
2751    }
2752
2753    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2754        int64_t flashFiringTime =
2755            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2756        rc = AddSetParmEntryToBatch(mParameters,
2757                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2758    }
2759
2760    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2761        uint8_t geometricMode =
2762            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2763        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2764                sizeof(geometricMode), &geometricMode);
2765    }
2766
2767    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2768        uint8_t geometricStrength =
2769            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2770        rc = AddSetParmEntryToBatch(mParameters,
2771                CAM_INTF_META_GEOMETRIC_STRENGTH,
2772                sizeof(geometricStrength), &geometricStrength);
2773    }
2774
2775    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2776        uint8_t hotPixelMode =
2777            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2778        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2779                sizeof(hotPixelMode), &hotPixelMode);
2780    }
2781
2782    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2783        float lensAperture =
2784            frame_settings.find(ANDROID_LENS_APERTURE).data.f[0];
2785        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2786                sizeof(lensAperture), &lensAperture);
2787    }
2788
2789    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2790        float filterDensity =
2791            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2792        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2793                sizeof(filterDensity), &filterDensity);
2794    }
2795
2796    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2797        float focalLength =
2798            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2799        rc = AddSetParmEntryToBatch(mParameters,
2800                CAM_INTF_META_LENS_FOCAL_LENGTH,
2801                sizeof(focalLength), &focalLength);
2802    }
2803
2804    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2805        uint8_t optStabMode =
2806            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2807        rc = AddSetParmEntryToBatch(mParameters,
2808                CAM_INTF_META_LENS_OPT_STAB_MODE,
2809                sizeof(optStabMode), &optStabMode);
2810    }
2811
2812    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2813        uint8_t noiseRedMode =
2814            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2815        rc = AddSetParmEntryToBatch(mParameters,
2816                CAM_INTF_META_NOISE_REDUCTION_MODE,
2817                sizeof(noiseRedMode), &noiseRedMode);
2818    }
2819
2820    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2821        uint8_t noiseRedStrength =
2822            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2823        rc = AddSetParmEntryToBatch(mParameters,
2824                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2825                sizeof(noiseRedStrength), &noiseRedStrength);
2826    }
2827
2828    cam_crop_region_t scalerCropRegion;
2829    bool scalerCropSet = false;
2830    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2831        scalerCropRegion.left =
2832            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2833        scalerCropRegion.top =
2834            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2835        scalerCropRegion.width =
2836            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2837        scalerCropRegion.height =
2838            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2839        rc = AddSetParmEntryToBatch(mParameters,
2840                CAM_INTF_META_SCALER_CROP_REGION,
2841                sizeof(scalerCropRegion), &scalerCropRegion);
2842        scalerCropSet = true;
2843    }
2844
2845    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2846        int64_t sensorExpTime =
2847            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2848        rc = AddSetParmEntryToBatch(mParameters,
2849                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2850                sizeof(sensorExpTime), &sensorExpTime);
2851    }
2852
2853    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2854        int64_t sensorFrameDuration =
2855            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2856        rc = AddSetParmEntryToBatch(mParameters,
2857                CAM_INTF_META_SENSOR_FRAME_DURATION,
2858                sizeof(sensorFrameDuration), &sensorFrameDuration);
2859    }
2860
2861    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2862        int32_t sensorSensitivity =
2863            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2864        rc = AddSetParmEntryToBatch(mParameters,
2865                CAM_INTF_META_SENSOR_SENSITIVITY,
2866                sizeof(sensorSensitivity), &sensorSensitivity);
2867    }
2868
2869    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
2870        int32_t shadingMode =
2871            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
2872        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
2873                sizeof(shadingMode), &shadingMode);
2874    }
2875
2876    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
2877        uint8_t shadingStrength =
2878            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
2879        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
2880                sizeof(shadingStrength), &shadingStrength);
2881    }
2882
2883    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
2884        uint8_t facedetectMode =
2885            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
2886        rc = AddSetParmEntryToBatch(mParameters,
2887                CAM_INTF_META_STATS_FACEDETECT_MODE,
2888                sizeof(facedetectMode), &facedetectMode);
2889    }
2890
2891    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
2892        uint8_t histogramMode =
2893            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
2894        rc = AddSetParmEntryToBatch(mParameters,
2895                CAM_INTF_META_STATS_HISTOGRAM_MODE,
2896                sizeof(histogramMode), &histogramMode);
2897    }
2898
2899    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
2900        uint8_t sharpnessMapMode =
2901            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
2902        rc = AddSetParmEntryToBatch(mParameters,
2903                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
2904                sizeof(sharpnessMapMode), &sharpnessMapMode);
2905    }
2906
2907    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
2908        uint8_t tonemapMode =
2909            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
2910        rc = AddSetParmEntryToBatch(mParameters,
2911                CAM_INTF_META_TONEMAP_MODE,
2912                sizeof(tonemapMode), &tonemapMode);
2913    }
2914
2915    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2916        uint8_t captureIntent =
2917            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2918        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2919                sizeof(captureIntent), &captureIntent);
2920    }
2921
2922    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
2923        cam_area_t roi;
2924        bool reset = true;
2925        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
2926        if (scalerCropSet) {
2927            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2928        }
2929        if (reset) {
2930            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
2931                    sizeof(roi), &roi);
2932        }
2933    }
2934
2935    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
2936        cam_area_t roi;
2937        bool reset = true;
2938        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
2939        if (scalerCropSet) {
2940            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2941        }
2942        if (reset) {
2943            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
2944                    sizeof(roi), &roi);
2945        }
2946    }
2947
2948    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
2949        cam_area_t roi;
2950        bool reset = true;
2951        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
2952        if (scalerCropSet) {
2953            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2954        }
2955        if (reset) {
2956            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
2957                    sizeof(roi), &roi);
2958        }
2959    }
2960    return rc;
2961}
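/* Illustrative sketch (not part of the HAL; variable names are hypothetical):
 * the translation above consumes standard CameraMetadata tags from the
 * framework request. A request that locks AWB and asks for a sepia effect
 * would carry entries built roughly like this, which the blocks above then
 * convert into CAM_INTF_PARM_AWB_LOCK and CAM_INTF_PARM_EFFECT batch entries:
 *
 *   CameraMetadata request;
 *   uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_ON;
 *   uint8_t effect  = ANDROID_CONTROL_EFFECT_MODE_SEPIA;
 *   request.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
 *   request.update(ANDROID_CONTROL_EFFECT_MODE, &effect, 1);
 */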
2962
2963/*===========================================================================
2964 * FUNCTION   : getJpegSettings
2965 *
2966 * DESCRIPTION: save the jpeg settings in the HAL
2967 *
2968 *
2969 * PARAMETERS :
2970 *   @settings  : frame settings information from framework
2971 *
2972 *
2973 * RETURN     : success: NO_ERROR
2974 *              failure: none (the function currently always returns 0)
2975 *==========================================================================*/
2976int QCamera3HardwareInterface::getJpegSettings
2977                                  (const camera_metadata_t *settings)
2978{
2979    if (mJpegSettings) {
2980        if (mJpegSettings->gps_timestamp) {
2981            free(mJpegSettings->gps_timestamp);
2982            mJpegSettings->gps_timestamp = NULL;
2983        }
2984        if (mJpegSettings->gps_coordinates) {
2985            for (int i = 0; i < 3; i++) {
2986                free(mJpegSettings->gps_coordinates[i]);
2987                mJpegSettings->gps_coordinates[i] = NULL;
2988            }
2989        }
2990        free(mJpegSettings);
2991        mJpegSettings = NULL;
2992    }
2993    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
2994    CameraMetadata jpeg_settings;
2995    jpeg_settings = settings;
2996
2997    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
2998        mJpegSettings->jpeg_orientation =
2999            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3000    } else {
3001        mJpegSettings->jpeg_orientation = 0;
3002    }
3003    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3004        mJpegSettings->jpeg_quality =
3005            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3006    } else {
3007        mJpegSettings->jpeg_quality = 85;
3008    }
3009    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3010        mJpegSettings->thumbnail_size.width =
3011            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3012        mJpegSettings->thumbnail_size.height =
3013            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3014    } else {
3015        mJpegSettings->thumbnail_size.width = 0;
3016        mJpegSettings->thumbnail_size.height = 0;
3017    }
3018    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3019        for (int i = 0; i < 3; i++) {
3020            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double));
3021            *(mJpegSettings->gps_coordinates[i]) =
3022                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3023        }
3024    } else {
3025        for (int i = 0; i < 3; i++) {
3026            mJpegSettings->gps_coordinates[i] = NULL;
3027        }
3028    }
3029
3030    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3031        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t));
3032        *(mJpegSettings->gps_timestamp) =
3033            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3034    } else {
3035        mJpegSettings->gps_timestamp = NULL;
3036    }
3037
3038    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3039        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3040        for (int i = 0; i < len; i++) {
3041            mJpegSettings->gps_processing_method[i] =
3042                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3043        }
3044        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3045            mJpegSettings->gps_processing_method[len] = '\0';
3046        }
3047    } else {
3048        mJpegSettings->gps_processing_method[0] = '\0';
3049    }
3050
3051    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3052        mJpegSettings->sensor_sensitivity =
3053            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3054    } else {
3055        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3056    }
3057
3058    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3059        mJpegSettings->lens_focal_length =
3060            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3061    }
3062    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3063        mJpegSettings->exposure_compensation =
3064            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3065    }
3066    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3067    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3068    return 0;
3069}
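/* Illustrative sketch (not part of the HAL; variable names are hypothetical):
 * getJpegSettings() falls back to defaults when a tag is absent (orientation 0,
 * quality 85, zero thumbnail size, no GPS fields). A framework request that
 * overrides the JPEG fields read above would carry entries such as:
 *
 *   CameraMetadata request;
 *   int32_t orientation = 90;
 *   uint8_t quality = 95;
 *   int32_t thumbSize[2] = {320, 240};
 *   request.update(ANDROID_JPEG_ORIENTATION, &orientation, 1);
 *   request.update(ANDROID_JPEG_QUALITY, &quality, 1);
 *   request.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbSize, 2);
 */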
3070
3071/*===========================================================================
3072 * FUNCTION   : captureResultCb
3073 *
3074 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3075 *
3076 * PARAMETERS :
3077 *   @metadata : metadata super buffer from mm-camera-interface (NULL for stream buffers)
3078 *   @buffer   : actual gralloc buffer to be returned to the framework (NULL if metadata)
3079 *   @frame_number : frame number of the request; @userdata : HAL instance pointer
3080 *
3081 * RETURN     : NONE
3082 *==========================================================================*/
3083void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3084                camera3_stream_buffer_t *buffer,
3085                uint32_t frame_number, void *userdata)
3086{
3087    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3088    if (hw == NULL) {
3089        ALOGE("%s: Invalid hw %p", __func__, hw);
3090        return;
3091    }
3092
3093    hw->captureResultCb(metadata, buffer, frame_number);
3094    return;
3095}
3096
3097/*===========================================================================
3098 * FUNCTION   : initialize
3099 *
3100 * DESCRIPTION: Pass framework callback pointers to HAL
3101 *
3102 * PARAMETERS :
3103 *   @device       : camera3 device handle
3104 *   @callback_ops : callback function pointers supplied by the framework
3105 * RETURN     : Success : 0
3106 *              Failure: -ENODEV
3107 *==========================================================================*/
3108
3109int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3110                                  const camera3_callback_ops_t *callback_ops)
3111{
3112    ALOGV("%s: E", __func__);
3113    QCamera3HardwareInterface *hw =
3114        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3115    if (!hw) {
3116        ALOGE("%s: NULL camera device", __func__);
3117        return -ENODEV;
3118    }
3119
3120    int rc = hw->initialize(callback_ops);
3121    ALOGV("%s: X", __func__);
3122    return rc;
3123}
3124
3125/*===========================================================================
3126 * FUNCTION   : configure_streams
3127 *
3128 * DESCRIPTION: Set up the output streams requested by the framework
3129 *
3130 * PARAMETERS :
3131 *   @device      : camera3 device handle
3132 *   @stream_list : list of streams to be configured
3133 * RETURN     : Success: 0
3134 *              Failure: -EINVAL (if stream configuration is invalid)
3135 *                       -ENODEV (fatal error)
3136 *==========================================================================*/
3137
3138int QCamera3HardwareInterface::configure_streams(
3139        const struct camera3_device *device,
3140        camera3_stream_configuration_t *stream_list)
3141{
3142    ALOGV("%s: E", __func__);
3143    QCamera3HardwareInterface *hw =
3144        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3145    if (!hw) {
3146        ALOGE("%s: NULL camera device", __func__);
3147        return -ENODEV;
3148    }
3149    int rc = hw->configureStreams(stream_list);
3150    ALOGV("%s: X", __func__);
3151    return rc;
3152}
3153
3154/*===========================================================================
3155 * FUNCTION   : register_stream_buffers
3156 *
3157 * DESCRIPTION: Register stream buffers with the device
3158 *
3159 * PARAMETERS :
3160 *   @buffer_set : buffers to be registered with each configured stream
3161 * RETURN     : 0 on success; error code on failure
3162 *==========================================================================*/
3163int QCamera3HardwareInterface::register_stream_buffers(
3164        const struct camera3_device *device,
3165        const camera3_stream_buffer_set_t *buffer_set)
3166{
3167    ALOGV("%s: E", __func__);
3168    QCamera3HardwareInterface *hw =
3169        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3170    if (!hw) {
3171        ALOGE("%s: NULL camera device", __func__);
3172        return -ENODEV;
3173    }
3174    int rc = hw->registerStreamBuffers(buffer_set);
3175    ALOGV("%s: X", __func__);
3176    return rc;
3177}
3178
3179/*===========================================================================
3180 * FUNCTION   : construct_default_request_settings
3181 *
3182 * DESCRIPTION: Configure a settings buffer to meet the required use case
3183 *
3184 * PARAMETERS :
3185 *   @device : camera3 device handle
3186 *   @type   : CAMERA3_TEMPLATE_* use case the default settings are built for
3187 * RETURN     : Success: Return valid metadata
3188 *              Failure: Return NULL
3189 *==========================================================================*/
3190const camera_metadata_t* QCamera3HardwareInterface::
3191    construct_default_request_settings(const struct camera3_device *device,
3192                                        int type)
3193{
3194
3195    ALOGV("%s: E", __func__);
3196    camera_metadata_t* fwk_metadata = NULL;
3197    QCamera3HardwareInterface *hw =
3198        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3199    if (!hw) {
3200        ALOGE("%s: NULL camera device", __func__);
3201        return NULL;
3202    }
3203
3204    fwk_metadata = hw->translateCapabilityToMetadata(type);
3205
3206    ALOGV("%s: X", __func__);
3207    return fwk_metadata;
3208}
3209
3210/*===========================================================================
3211 * FUNCTION   : process_capture_request
3212 *
3213 * DESCRIPTION: Submit a capture request to the HAL for processing
3214 *
3215 * PARAMETERS :
3216 *   @device  : camera3 device handle
3217 *   @request : capture request to be processed
3218 * RETURN     : 0 on success; negative error code (e.g. -EINVAL) on failure
3219 *==========================================================================*/
3220int QCamera3HardwareInterface::process_capture_request(
3221                    const struct camera3_device *device,
3222                    camera3_capture_request_t *request)
3223{
3224    ALOGV("%s: E", __func__);
3225    QCamera3HardwareInterface *hw =
3226        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3227    if (!hw) {
3228        ALOGE("%s: NULL camera device", __func__);
3229        return -EINVAL;
3230    }
3231
3232    int rc = hw->processCaptureRequest(request);
3233    ALOGV("%s: X", __func__);
3234    return rc;
3235}
3236
3237/*===========================================================================
3238 * FUNCTION   : get_metadata_vendor_tag_ops
3239 *
3240 * DESCRIPTION: Export the vendor tag query operations to the framework
3241 *
3242 * PARAMETERS :
3243 *   @device : camera3 device handle
3244 *   @ops    : vendor tag query ops table to be filled in by the HAL
3245 * RETURN     : NONE
3246 *==========================================================================*/
3247
3248void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3249                const struct camera3_device *device,
3250                vendor_tag_query_ops_t* ops)
3251{
3252    ALOGV("%s: E", __func__);
3253    QCamera3HardwareInterface *hw =
3254        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3255    if (!hw) {
3256        ALOGE("%s: NULL camera device", __func__);
3257        return;
3258    }
3259
3260    hw->getMetadataVendorTagOps(ops);
3261    ALOGV("%s: X", __func__);
3262    return;
3263}
3264
3265/*===========================================================================
3266 * FUNCTION   : dump
3267 *
3268 * DESCRIPTION: Dump HAL state and debug information to the given fd
3269 *
3270 * PARAMETERS :
3271 *   @device : camera3 device handle
3272 *   @fd     : file descriptor to write the dump output to
3273 * RETURN     : NONE
3274 *==========================================================================*/
3275
3276void QCamera3HardwareInterface::dump(
3277                const struct camera3_device *device, int fd)
3278{
3279    ALOGV("%s: E", __func__);
3280    QCamera3HardwareInterface *hw =
3281        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3282    if (!hw) {
3283        ALOGE("%s: NULL camera device", __func__);
3284        return;
3285    }
3286
3287    hw->dump(fd);
3288    ALOGV("%s: X", __func__);
3289    return;
3290}
3291
3292/*===========================================================================
3293 * FUNCTION   : close_camera_device
3294 *
3295 * DESCRIPTION: Close the camera device and end the active camera session
3296 *
3297 * PARAMETERS :
3298 *   @device : hw_device_t handle of the camera device to close
3299 *
3300 * RETURN     : NO_ERROR on success; BAD_VALUE if device is NULL
3301 *==========================================================================*/
3302int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3303{
3304    ALOGV("%s: E", __func__);
3305    int ret = NO_ERROR;
3306    QCamera3HardwareInterface *hw =
3307        reinterpret_cast<QCamera3HardwareInterface *>(
3308            reinterpret_cast<camera3_device_t *>(device)->priv);
3309    if (!hw) {
3310        ALOGE("NULL camera device");
3311        return BAD_VALUE;
3312    }
3313    delete hw;
3314
3315    pthread_mutex_lock(&mCameraSessionLock);
3316    mCameraSessionActive = 0;
3317    pthread_mutex_unlock(&mCameraSessionLock);
3318    ALOGV("%s: X", __func__);
3319    return ret;
3320}
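/* Sketch only (field names come from camera3_device_ops_t in hardware/camera3.h;
 * the table identifier below is hypothetical and the real table is defined
 * elsewhere in this file): the static wrappers above are handed to the camera
 * service through an ops structure along these lines, while
 * close_camera_device() is reached via hw_device_t::close:
 *
 *   camera3_device_ops_t ops = {
 *       .initialize                         = QCamera3HardwareInterface::initialize,
 *       .configure_streams                  = QCamera3HardwareInterface::configure_streams,
 *       .register_stream_buffers            = QCamera3HardwareInterface::register_stream_buffers,
 *       .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
 *       .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
 *       .get_metadata_vendor_tag_ops        = QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
 *       .dump                               = QCamera3HardwareInterface::dump,
 *   };
 */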
3321
3322}; //end namespace qcamera
3323