QCamera3HWI.cpp revision 827c33fdbfdbd07c5af33ecc8a2f3619f5c8fcb3
1/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
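// Globals shared across camera sessions: per-sensor capability info, previously applied settings, and cached static metadata.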
49cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
50parm_buffer_t *prevSettings;
51const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
52
53pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
54    PTHREAD_MUTEX_INITIALIZER;
55unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
56
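// Translation tables mapping Android framework control enums to the corresponding HAL (cam_*) enums.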
57const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
58    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
59    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
60    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
61    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
62    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
63    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
64    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
65    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
66    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
67};
68
69const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
70    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
71    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
72    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
73    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
74    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
75    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
76    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
77    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
78    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
79};
80
81const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
82    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
83    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
84    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
85    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
86    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
87    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
88    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
89    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
90    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
91    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
92    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
93    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
94    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
95    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
96    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
97};
98
99const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
100    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
101    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
102    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
103    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
104    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
105    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
106};
107
108const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
109    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
110    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
111    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
112    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
113};
114
115const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
116    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
117    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
118    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
119    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
120    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
121};
122
123const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
124    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
125    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON   },
126    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
127};
128
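// Supported JPEG thumbnail sizes as flattened (width, height) pairs; the trailing (0, 0) entry allows thumbnails to be disabled.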
129const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
130                                             320, 240, 176, 144, 0, 0};
131
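// camera3_device_ops vtable routing HAL v3 entry points to QCamera3HardwareInterface's static entry points (GNU "label:" designated-initializer syntax).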
132camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
133    initialize:                         QCamera3HardwareInterface::initialize,
134    configure_streams:                  QCamera3HardwareInterface::configure_streams,
135    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
136    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
137    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
138    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
139    dump:                               QCamera3HardwareInterface::dump,
140};
141
142
143/*===========================================================================
144 * FUNCTION   : QCamera3HardwareInterface
145 *
146 * DESCRIPTION: constructor of QCamera3HardwareInterface
147 *
148 * PARAMETERS :
149 *   @cameraId  : camera ID
150 *
151 * RETURN     : none
152 *==========================================================================*/
153QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
154    : mCameraId(cameraId),
155      mCameraHandle(NULL),
156      mCameraOpened(false),
157      mCameraInitialized(false),
158      mCallbackOps(NULL),
159      mInputStream(NULL),
160      mMetadataChannel(NULL),
161      mFirstRequest(false),
162      mParamHeap(NULL),
163      mParameters(NULL),
164      mJpegSettings(NULL),
165      m_pPowerModule(NULL)
166{
167    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
168    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
169    mCameraDevice.common.close = close_camera_device;
170    mCameraDevice.ops = &mCameraOps;
171    mCameraDevice.priv = this;
172    gCamCapability[cameraId]->version = CAM_HAL_V3;
173
174    pthread_mutex_init(&mRequestLock, NULL);
175    pthread_cond_init(&mRequestCond, NULL);
176    mPendingRequest = 0;
177    mCurrentRequestId = -1;
178
179    pthread_mutex_init(&mMutex, NULL);
180    pthread_mutex_init(&mCaptureResultLock, NULL);
181
182    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
183        mDefaultMetadata[i] = NULL;
184
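    // Load the power HAL (when multimedia hints are enabled) so openCamera()/closeCamera() can send performance hints.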
185#ifdef HAS_MULTIMEDIA_HINTS
186    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
187        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
188    }
189#endif
190}
191
192/*===========================================================================
193 * FUNCTION   : ~QCamera3HardwareInterface
194 *
195 * DESCRIPTION: destructor of QCamera3HardwareInterface
196 *
197 * PARAMETERS : none
198 *
199 * RETURN     : none
200 *==========================================================================*/
201QCamera3HardwareInterface::~QCamera3HardwareInterface()
202{
203    ALOGV("%s: E", __func__);
204    /* We need to stop all streams before deleting any stream */
205    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
206        it != mStreamInfo.end(); it++) {
207        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
208        if (channel)
209            channel->stop();
210    }
211    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
212        it != mStreamInfo.end(); it++) {
213        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
214        if (channel)
215            delete channel;
216        free (*it);
217    }
218
219    if (mJpegSettings != NULL) {
220        free(mJpegSettings);
221        mJpegSettings = NULL;
222    }
223
224    /* Clean up all channels */
225    if (mCameraInitialized) {
226        mMetadataChannel->stop();
227        delete mMetadataChannel;
228        mMetadataChannel = NULL;
229        deinitParameters();
230    }
231
232    if (mCameraOpened)
233        closeCamera();
234
235    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
236        if (mDefaultMetadata[i])
237            free_camera_metadata(mDefaultMetadata[i]);
238
239    pthread_mutex_destroy(&mRequestLock);
240    pthread_cond_destroy(&mRequestCond);
241
242    pthread_mutex_destroy(&mMutex);
243    pthread_mutex_destroy(&mCaptureResultLock);
244    ALOGV("%s: X", __func__);
245}
246
247/*===========================================================================
248 * FUNCTION   : openCamera
249 *
250 * DESCRIPTION: open camera
251 *
252 * PARAMETERS :
253 *   @hw_device  : double ptr for camera device struct
254 *
255 * RETURN     : int32_t type of status
256 *              NO_ERROR  -- success
257 *              non-zero failure code
258 *==========================================================================*/
259int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
260{
261    int rc = 0;
262    pthread_mutex_lock(&mCameraSessionLock);
263    if (mCameraSessionActive) {
264        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
265        pthread_mutex_unlock(&mCameraSessionLock);
266        return INVALID_OPERATION;
267    }
268
269    if (mCameraOpened) {
270        *hw_device = NULL;
        pthread_mutex_unlock(&mCameraSessionLock);
271        return PERMISSION_DENIED;
272    }
273
274    rc = openCamera();
275    if (rc == 0) {
276        *hw_device = &mCameraDevice.common;
277        mCameraSessionActive = 1;
278    } else
279        *hw_device = NULL;
280
281#ifdef HAS_MULTIMEDIA_HINTS
282    if (rc == 0) {
283        if (m_pPowerModule) {
284            if (m_pPowerModule->powerHint) {
285                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
286                        (void *)"state=1");
287            }
288        }
289    }
290#endif
291    pthread_mutex_unlock(&mCameraSessionLock);
292    return rc;
293}
294
295/*===========================================================================
296 * FUNCTION   : openCamera
297 *
298 * DESCRIPTION: open camera
299 *
300 * PARAMETERS : none
301 *
302 * RETURN     : int32_t type of status
303 *              NO_ERROR  -- success
304 *              non-zero failure code
305 *==========================================================================*/
306int QCamera3HardwareInterface::openCamera()
307{
308    if (mCameraHandle) {
309        ALOGE("Failure: Camera already opened");
310        return ALREADY_EXISTS;
311    }
312    mCameraHandle = camera_open(mCameraId);
313    if (!mCameraHandle) {
314        ALOGE("camera_open failed.");
315        return UNKNOWN_ERROR;
316    }
317
318    mCameraOpened = true;
319
320    return NO_ERROR;
321}
322
323/*===========================================================================
324 * FUNCTION   : closeCamera
325 *
326 * DESCRIPTION: close camera
327 *
328 * PARAMETERS : none
329 *
330 * RETURN     : int32_t type of status
331 *              NO_ERROR  -- success
332 *              non-zero failure code
333 *==========================================================================*/
334int QCamera3HardwareInterface::closeCamera()
335{
336    int rc = NO_ERROR;
337
338    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
339    mCameraHandle = NULL;
340    mCameraOpened = false;
341
342#ifdef HAS_MULTIMEDIA_HINTS
343    if (rc == NO_ERROR) {
344        if (m_pPowerModule) {
345            if (m_pPowerModule->powerHint) {
346                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
347                        (void *)"state=0");
348            }
349        }
350    }
351#endif
352
353    return rc;
354}
355
356/*===========================================================================
357 * FUNCTION   : initialize
358 *
359 * DESCRIPTION: Initialize frameworks callback functions
360 *
361 * PARAMETERS :
362 *   @callback_ops : callback function to frameworks
363 *
364 * RETURN     :
365 *
366 *==========================================================================*/
367int QCamera3HardwareInterface::initialize(
368        const struct camera3_callback_ops *callback_ops)
369{
370    int rc;
371
372    pthread_mutex_lock(&mMutex);
373
374    rc = initParameters();
375    if (rc < 0) {
376        ALOGE("%s: initParamters failed %d", __func__, rc);
377       goto err1;
378    }
379    //Create metadata channel and initialize it
380    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
381                    mCameraHandle->ops, captureResultCb,
382                    &gCamCapability[mCameraId]->padding_info, this);
383    if (mMetadataChannel == NULL) {
384        ALOGE("%s: failed to allocate metadata channel", __func__);
385        rc = -ENOMEM;
386        goto err2;
387    }
388    rc = mMetadataChannel->initialize();
389    if (rc < 0) {
390        ALOGE("%s: metadata channel initialization failed", __func__);
391        goto err3;
392    }
393
394    mCallbackOps = callback_ops;
395
396    pthread_mutex_unlock(&mMutex);
397    mCameraInitialized = true;
398    return 0;
399
400err3:
401    delete mMetadataChannel;
402    mMetadataChannel = NULL;
403err2:
404    deinitParameters();
405err1:
406    pthread_mutex_unlock(&mMutex);
407    return rc;
408}
409
410/*===========================================================================
411 * FUNCTION   : configureStreams
412 *
413 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
414 *              and output streams.
415 *
416 * PARAMETERS :
417 *   @stream_list : streams to be configured
418 *
419 * RETURN     :
420 *
421 *==========================================================================*/
422int QCamera3HardwareInterface::configureStreams(
423        camera3_stream_configuration_t *streamList)
424{
425    int rc = 0;
426    pthread_mutex_lock(&mMutex);
427
428    // Sanity check stream_list
429    if (streamList == NULL) {
430        ALOGE("%s: NULL stream configuration", __func__);
431        pthread_mutex_unlock(&mMutex);
432        return BAD_VALUE;
433    }
434
435    if (streamList->streams == NULL) {
436        ALOGE("%s: NULL stream list", __func__);
437        pthread_mutex_unlock(&mMutex);
438        return BAD_VALUE;
439    }
440
441    if (streamList->num_streams < 1) {
442        ALOGE("%s: Bad number of streams requested: %d", __func__,
443                streamList->num_streams);
444        pthread_mutex_unlock(&mMutex);
445        return BAD_VALUE;
446    }
447
448    camera3_stream_t *inputStream = NULL;
449    /* first invalidate all the streams in mStreamInfo;
450     * if they appear again, they will be validated */
451    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
452            it != mStreamInfo.end(); it++) {
453        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
454        channel->stop();
455        (*it)->status = INVALID;
456    }
457
458    for (size_t i = 0; i < streamList->num_streams; i++) {
459        camera3_stream_t *newStream = streamList->streams[i];
460        ALOGV("%s: newStream type = %d, stream format = %d",
461                __func__, newStream->stream_type, newStream->format);
462        //if the stream is in the mStreamList validate it
463        bool stream_exists = false;
464        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
465                it != mStreamInfo.end(); it++) {
466            if ((*it)->stream == newStream) {
467                QCamera3Channel *channel =
468                    (QCamera3Channel*)(*it)->stream->priv;
469                stream_exists = true;
470                (*it)->status = RECONFIGURE;
471                /*delete the channel object associated with the stream because
472                  we need to reconfigure*/
473                delete channel;
474                (*it)->stream->priv = NULL;
475            }
476        }
477        if (!stream_exists) {
478            //new stream
479            stream_info_t* stream_info;
480            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
481            stream_info->stream = newStream;
482            stream_info->status = VALID;
483            stream_info->registered = 0;
484            mStreamInfo.push_back(stream_info);
485        }
486        if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
487            if (inputStream != NULL) {
488                ALOGE("%s: Multiple input streams requested!", __func__);
489                pthread_mutex_unlock(&mMutex);
490                return BAD_VALUE;
491            }
492            inputStream = newStream;
493        }
494    }
495    mInputStream = inputStream;
496
497    /*clean up invalid streams*/
498    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
499            it != mStreamInfo.end();) {
500        if(((*it)->status) == INVALID){
501            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
502            delete channel;
            if ((*it)->registered)  /* buffer_set.buffers is only allocated once the stream has been registered */
503                delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
504            free(*it);
505            it = mStreamInfo.erase(it);
506        } else {
507            it++;
508        }
509    }
510
511    //mMetadataChannel->stop();
512
513    /* Allocate channel objects for the requested streams */
514    for (size_t i = 0; i < streamList->num_streams; i++) {
515        camera3_stream_t *newStream = streamList->streams[i];
516        if (newStream->priv == NULL) {
517            //New stream, construct channel
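            // Pick gralloc usage flags based on the stream direction before constructing the channel.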
518            switch (newStream->stream_type) {
519            case CAMERA3_STREAM_INPUT:
520                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
521                break;
522            case CAMERA3_STREAM_BIDIRECTIONAL:
523                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
524                    GRALLOC_USAGE_HW_CAMERA_WRITE;
525                break;
526            case CAMERA3_STREAM_OUTPUT:
527                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
528                break;
529            default:
530                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
531                break;
532            }
533
534            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
535                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
536                QCamera3Channel *channel;
537                switch (newStream->format) {
538                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
539                case HAL_PIXEL_FORMAT_YCbCr_420_888:
540                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
541                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
542                            mCameraHandle->ops, captureResultCb,
543                            &gCamCapability[mCameraId]->padding_info, this, newStream);
544                    if (channel == NULL) {
545                        ALOGE("%s: allocation of channel failed", __func__);
546                        pthread_mutex_unlock(&mMutex);
547                        return -ENOMEM;
548                    }
549
550                    newStream->priv = channel;
551                    break;
552                case HAL_PIXEL_FORMAT_BLOB:
553                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
554                    channel = new QCamera3PicChannel(mCameraHandle->camera_handle,
555                            mCameraHandle->ops, captureResultCb,
556                            &gCamCapability[mCameraId]->padding_info, this, newStream);
557                    if (channel == NULL) {
558                        ALOGE("%s: allocation of channel failed", __func__);
559                        pthread_mutex_unlock(&mMutex);
560                        return -ENOMEM;
561                    }
562                    newStream->priv = channel;
563                    break;
564
565                //TODO: Add support for app consumed format?
566                default:
567                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
568                    break;
569                }
570            }
571        } else {
572            // Channel already exists for this stream
573            // Do nothing for now
574        }
575    }
576    /*For the streams to be reconfigured we need to register the buffers
577      again since the framework won't*/
578    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
579            it != mStreamInfo.end(); it++) {
580        if ((*it)->status == RECONFIGURE) {
581            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
582            /*only register buffers for streams that have already been
583              registered*/
584            if ((*it)->registered) {
585                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
586                        (*it)->buffer_set.buffers);
587                if (rc != NO_ERROR) {
588                    ALOGE("%s: Failed to register the buffers of old stream,\
589                            rc = %d", __func__, rc);
590                }
591                ALOGV("%s: channel %p has %d buffers",
592                        __func__, channel, (*it)->buffer_set.num_buffers);
593            }
594        }
595
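        // Reset the count of buffers outstanding with the HAL for this stream.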
596        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
597        if (index == NAME_NOT_FOUND) {
598            mPendingBuffersMap.add((*it)->stream, 0);
599        } else {
600            mPendingBuffersMap.editValueAt(index) = 0;
601        }
602    }
603
604    /* Initialize mPendingRequestsList and mPendingBuffersMap */
605    mPendingRequestsList.clear();
606
607    //settings/parameters don't carry over for new configureStreams
608    memset(mParameters, 0, sizeof(parm_buffer_t));
609    mFirstRequest = true;
610
611    pthread_mutex_unlock(&mMutex);
612    return rc;
613}
614
615/*===========================================================================
616 * FUNCTION   : validateCaptureRequest
617 *
618 * DESCRIPTION: validate a capture request from camera service
619 *
620 * PARAMETERS :
621 *   @request : request from framework to process
622 *
623 * RETURN     :
624 *
625 *==========================================================================*/
626int QCamera3HardwareInterface::validateCaptureRequest(
627                    camera3_capture_request_t *request)
628{
629    ssize_t idx = 0;
630    const camera3_stream_buffer_t *b;
631    CameraMetadata meta;
632
633    /* Sanity check the request */
634    if (request == NULL) {
635        ALOGE("%s: NULL capture request", __func__);
636        return BAD_VALUE;
637    }
638
639    uint32_t frameNumber = request->frame_number;
640    if (request->input_buffer != NULL &&
641            request->input_buffer->stream != mInputStream) {
642        ALOGE("%s: Request %d: Input buffer not from input stream!",
643                __FUNCTION__, frameNumber);
644        return BAD_VALUE;
645    }
646    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
647        ALOGE("%s: Request %d: No output buffers provided!",
648                __FUNCTION__, frameNumber);
649        return BAD_VALUE;
650    }
651    if (request->input_buffer != NULL) {
652        //TODO
653        ALOGE("%s: Not supporting input buffer yet", __func__);
654        return BAD_VALUE;
655    }
656
657    // Validate all buffers
658    b = request->output_buffers;
659    do {
660        QCamera3Channel *channel =
661                static_cast<QCamera3Channel*>(b->stream->priv);
662        if (channel == NULL) {
663            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
664                    __func__, frameNumber, idx);
665            return BAD_VALUE;
666        }
667        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
668            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
669                    __func__, frameNumber, idx);
670            return BAD_VALUE;
671        }
672        if (b->release_fence != -1) {
673            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
674                    __func__, frameNumber, idx);
675            return BAD_VALUE;
676        }
677        if (b->buffer == NULL) {
678            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
679                    __func__, frameNumber, idx);
680            return BAD_VALUE;
681        }
682        idx++;
683        b = request->output_buffers + idx;
684    } while (idx < (ssize_t)request->num_output_buffers);
685
686    return NO_ERROR;
687}
688
689/*===========================================================================
690 * FUNCTION   : registerStreamBuffers
691 *
692 * DESCRIPTION: Register buffers for a given stream with the HAL device.
693 *
694 * PARAMETERS :
695 *   @buffer_set : contains the stream and the set of gralloc buffers to register for it
696 *
697 * RETURN     :
698 *
699 *==========================================================================*/
700int QCamera3HardwareInterface::registerStreamBuffers(
701        const camera3_stream_buffer_set_t *buffer_set)
702{
703    int rc = 0;
704
705    pthread_mutex_lock(&mMutex);
706
707    if (buffer_set == NULL) {
708        ALOGE("%s: Invalid buffer_set parameter.", __func__);
709        pthread_mutex_unlock(&mMutex);
710        return -EINVAL;
711    }
712    if (buffer_set->stream == NULL) {
713        ALOGE("%s: Invalid stream parameter.", __func__);
714        pthread_mutex_unlock(&mMutex);
715        return -EINVAL;
716    }
717    if (buffer_set->num_buffers < 1) {
718        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
719        pthread_mutex_unlock(&mMutex);
720        return -EINVAL;
721    }
722    if (buffer_set->buffers == NULL) {
723        ALOGE("%s: Invalid buffers parameter.", __func__);
724        pthread_mutex_unlock(&mMutex);
725        return -EINVAL;
726    }
727
728    camera3_stream_t *stream = buffer_set->stream;
729    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
730
731    //set the buffer_set in the mStreamInfo array
732    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
733            it != mStreamInfo.end(); it++) {
734        if ((*it)->stream == stream) {
735            uint32_t numBuffers = buffer_set->num_buffers;
736            (*it)->buffer_set.stream = buffer_set->stream;
737            (*it)->buffer_set.num_buffers = numBuffers;
738            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
739            if ((*it)->buffer_set.buffers == NULL) {
740                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
741                pthread_mutex_unlock(&mMutex);
742                return -ENOMEM;
743            }
744            for (size_t j = 0; j < numBuffers; j++){
745                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
746            }
747            (*it)->registered = 1;
748        }
749    }
750
751    if (stream->stream_type != CAMERA3_STREAM_OUTPUT) {
752        ALOGE("%s: not yet support non output type stream", __func__);
753        pthread_mutex_unlock(&mMutex);
754        return -EINVAL;
755    }
756    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
757    if (rc < 0) {
758        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
759        pthread_mutex_unlock(&mMutex);
760        return -ENODEV;
761    }
762
763    pthread_mutex_unlock(&mMutex);
764    return NO_ERROR;
765}
766
767/*===========================================================================
768 * FUNCTION   : processCaptureRequest
769 *
770 * DESCRIPTION: process a capture request from camera service
771 *
772 * PARAMETERS :
773 *   @request : request from framework to process
774 *
775 * RETURN     :
776 *
777 *==========================================================================*/
778int QCamera3HardwareInterface::processCaptureRequest(
779                    camera3_capture_request_t *request)
780{
781    int rc = NO_ERROR;
782    int32_t request_id;
783    CameraMetadata meta;
784
785    pthread_mutex_lock(&mMutex);
786
787    rc = validateCaptureRequest(request);
788    if (rc != NO_ERROR) {
789        ALOGE("%s: incoming request is not valid", __func__);
790        pthread_mutex_unlock(&mMutex);
791        return rc;
792    }
793
794    uint32_t frameNumber = request->frame_number;
795
796    rc = setFrameParameters(request->frame_number, request->settings);
797    if (rc < 0) {
798        ALOGE("%s: fail to set frame parameters", __func__);
799        pthread_mutex_unlock(&mMutex);
800        return rc;
801    }
802
803    meta = request->settings;
804    if (meta.exists(ANDROID_REQUEST_ID)) {
805        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
806        mCurrentRequestId = request_id;
807        ALOGV("%s: Received request with id: %d",__func__, request_id);
808    } else if (mFirstRequest || mCurrentRequestId == -1){
809        ALOGE("%s: Unable to find request id field, \
810                & no previous id available", __func__);
811        return NAME_NOT_FOUND;
812    } else {
813        ALOGV("%s: Re-using old request id", __func__);
814        request_id = mCurrentRequestId;
815    }
816
817
818    ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__,
819                                    request->num_output_buffers);
820    // Acquire all request buffers first
821    for (size_t i = 0; i < request->num_output_buffers; i++) {
822        const camera3_stream_buffer_t& output = request->output_buffers[i];
823        sp<Fence> acquireFence = new Fence(output.acquire_fence);
824
825        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
826            //Call function to store local copy of jpeg data for encode params.
827            rc = getJpegSettings(request->settings);
828            if (rc < 0) {
829                ALOGE("%s: failed to get jpeg parameters", __func__);
830                pthread_mutex_unlock(&mMutex);
831                return rc;
832            }
833        }
834
835        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
836        if (rc != OK) {
837            ALOGE("%s: fence wait failed %d", __func__, rc);
838            pthread_mutex_unlock(&mMutex);
839            return rc;
840        }
841    }
842
843    /* Update pending request list and pending buffers map */
844    pthread_mutex_lock(&mRequestLock);
845    PendingRequestInfo pendingRequest;
846    pendingRequest.frame_number = frameNumber;
847    pendingRequest.num_buffers = request->num_output_buffers;
848    pendingRequest.request_id = request_id;
849
850    for (size_t i = 0; i < request->num_output_buffers; i++) {
851        RequestedBufferInfo requestedBuf;
852        requestedBuf.stream = request->output_buffers[i].stream;
853        requestedBuf.buffer = NULL;
854        pendingRequest.buffers.push_back(requestedBuf);
855
856        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
857    }
858    mPendingRequestsList.push_back(pendingRequest);
859    pthread_mutex_unlock(&mRequestLock);
860
861    // Notify metadata channel we receive a request
862    mMetadataChannel->request(NULL, frameNumber);
863
864    // Call request on other streams
865    for (size_t i = 0; i < request->num_output_buffers; i++) {
866        const camera3_stream_buffer_t& output = request->output_buffers[i];
867        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
868
869        if (channel == NULL) {
870            ALOGE("%s: invalid channel pointer for stream", __func__);
871            continue;
872        }
873
874        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
875            rc = channel->request(output.buffer, frameNumber, mJpegSettings);
876        } else {
877            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
878                __LINE__, output.buffer, frameNumber);
879            rc = channel->request(output.buffer, frameNumber);
880        }
881        if (rc < 0)
882            ALOGE("%s: request failed", __func__);
883    }
884
885    mFirstRequest = false;
886
887    //Block until captureResultCb() signals that no stream has all of its max_buffers outstanding
888    pthread_mutex_lock(&mRequestLock);
889    mPendingRequest = 1;
890    while (mPendingRequest == 1) {
891        pthread_cond_wait(&mRequestCond, &mRequestLock);
892    }
893    pthread_mutex_unlock(&mRequestLock);
894
895    pthread_mutex_unlock(&mMutex);
896    return rc;
897}
898
899/*===========================================================================
900 * FUNCTION   : getMetadataVendorTagOps
901 *
902 * DESCRIPTION:
903 *
904 * PARAMETERS :
905 *
906 *
907 * RETURN     :
908 *==========================================================================*/
909void QCamera3HardwareInterface::getMetadataVendorTagOps(
910                    vendor_tag_query_ops_t* /*ops*/)
911{
912    /* Enable locks when we eventually add Vendor Tags */
913    /*
914    pthread_mutex_lock(&mMutex);
915
916    pthread_mutex_unlock(&mMutex);
917    */
918    return;
919}
920
921/*===========================================================================
922 * FUNCTION   : dump
923 *
924 * DESCRIPTION:
925 *
926 * PARAMETERS :
927 *
928 *
929 * RETURN     :
930 *==========================================================================*/
931void QCamera3HardwareInterface::dump(int /*fd*/)
932{
933    /*Enable lock when we implement this function*/
934    /*
935    pthread_mutex_lock(&mMutex);
936
937    pthread_mutex_unlock(&mMutex);
938    */
939    return;
940}
941
942/*===========================================================================
943 * FUNCTION   : captureResultCb
944 *
945 * DESCRIPTION: Callback handler for all capture result
946 *              (streams, as well as metadata)
947 *
948 * PARAMETERS :
949 *   @metadata : metadata information
950 *   @buffer   : actual gralloc buffer to be returned to frameworks.
951 *               NULL if metadata.
952 *
953 * RETURN     : NONE
954 *==========================================================================*/
955void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
956                camera3_stream_buffer_t *buffer, uint32_t frame_number)
957{
958    pthread_mutex_lock(&mRequestLock);
959
960    if (metadata_buf) {
961        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
962        int32_t frame_number_valid = *(int32_t *)
963            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
964        uint32_t frame_number = *(uint32_t *)
965            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
966        const struct timeval *tv = (const struct timeval *)
967            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
968        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
969            tv->tv_usec * NSEC_PER_USEC;
970
971        if (!frame_number_valid) {
972            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
973            mMetadataChannel->bufDone(metadata_buf);
974            goto done_metadata;
975        }
976        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
977                frame_number, capture_time);
978
979        // Go through the pending requests info and send shutter/results to frameworks
980        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
981                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
982            camera3_capture_result_t result;
983            camera3_notify_msg_t notify_msg;
984            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
985
986            // Flush out all entries with less or equal frame numbers.
987
988            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
989            //Right now it's the same as metadata timestamp
990
991            //TODO: When there is metadata drop, how do we derive the timestamp of
992            //dropped frames? For now, we fake the dropped timestamp by subtracting
993            //from the reported timestamp
994            nsecs_t current_capture_time = capture_time -
995                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
996
997            // Send shutter notify to frameworks
998            notify_msg.type = CAMERA3_MSG_SHUTTER;
999            notify_msg.message.shutter.frame_number = i->frame_number;
1000            notify_msg.message.shutter.timestamp = current_capture_time;
1001            mCallbackOps->notify(mCallbackOps, &notify_msg);
1002            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1003                    i->frame_number, current_capture_time);
1004
1005            // Send empty metadata with already filled buffers for dropped metadata
1006            // and send valid metadata with already filled buffers for current metadata
1007            if (i->frame_number < frame_number) {
1008                CameraMetadata dummyMetadata;
1009                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1010                        &current_capture_time, 1);
1011                dummyMetadata.update(ANDROID_REQUEST_ID,
1012                        &(i->request_id), 1);
1013                result.result = dummyMetadata.release();
1014            } else {
1015                result.result = translateCbMetadataToResultMetadata(metadata,
1016                        current_capture_time, i->request_id);
1017                // Return metadata buffer
1018                mMetadataChannel->bufDone(metadata_buf);
1019            }
1020            if (!result.result) {
1021                ALOGE("%s: metadata is NULL", __func__);
1022            }
1023            result.frame_number = i->frame_number;
1024            result.num_output_buffers = 0;
1025            result.output_buffers = NULL;
1026            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1027                    j != i->buffers.end(); j++) {
1028                if (j->buffer) {
1029                    result.num_output_buffers++;
1030                }
1031            }
1032
1033            if (result.num_output_buffers > 0) {
1034                camera3_stream_buffer_t *result_buffers =
1035                    new camera3_stream_buffer_t[result.num_output_buffers];
1036                if (!result_buffers) {
1037                    ALOGE("%s: Fatal error: out of memory", __func__);
1038                }
1039                size_t result_buffers_idx = 0;
1040                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1041                        j != i->buffers.end(); j++) {
1042                    if (j->buffer) {
1043                        result_buffers[result_buffers_idx++] = *(j->buffer);
1044                        free(j->buffer);
1045                        mPendingBuffersMap.editValueFor(j->stream)--;
1046                    }
1047                }
1048                result.output_buffers = result_buffers;
1049
1050                mCallbackOps->process_capture_result(mCallbackOps, &result);
1051                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1052                        __func__, result.frame_number, current_capture_time);
1053                free_camera_metadata((camera_metadata_t *)result.result);
1054                delete[] result_buffers;
1055            } else {
1056                mCallbackOps->process_capture_result(mCallbackOps, &result);
1057                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1058                        __func__, result.frame_number, current_capture_time);
1059                free_camera_metadata((camera_metadata_t *)result.result);
1060            }
1061            // erase the element from the list
1062            i = mPendingRequestsList.erase(i);
1063        }
1064
1065
1066done_metadata:
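        // Only unblock processCaptureRequest() when no stream has all of its max_buffers dequeued.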
1067        bool max_buffers_dequeued = false;
1068        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1069            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1070            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1071            if (queued_buffers == stream->max_buffers) {
1072                max_buffers_dequeued = true;
1073                break;
1074            }
1075        }
1076        if (!max_buffers_dequeued) {
1077            // Unblock process_capture_request
1078            mPendingRequest = 0;
1079            pthread_cond_signal(&mRequestCond);
1080        }
1081    } else {
1082        // If the frame number doesn't exist in the pending request list,
1083        // directly send the buffer to the frameworks, and update pending buffers map
1084        // Otherwise, book-keep the buffer.
1085        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1086        while (i != mPendingRequestsList.end() && i->frame_number != frame_number)
1087            i++;
1088        if (i == mPendingRequestsList.end()) {
1089            // Verify all pending requests frame_numbers are greater
1090            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1091                    j != mPendingRequestsList.end(); j++) {
1092                if (j->frame_number < frame_number) {
1093                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1094                            __func__, j->frame_number, frame_number);
1095                }
1096            }
1097            camera3_capture_result_t result;
1098            result.result = NULL;
1099            result.frame_number = frame_number;
1100            result.num_output_buffers = 1;
1101            result.output_buffers = buffer;
1102            ALOGV("%s: result frame_number = %d, buffer = %p",
1103                    __func__, frame_number, buffer);
1104            mPendingBuffersMap.editValueFor(buffer->stream)--;
1105            mCallbackOps->process_capture_result(mCallbackOps, &result);
1106        } else {
1107            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1108                    j != i->buffers.end(); j++) {
1109                if (j->stream == buffer->stream) {
1110                    if (j->buffer != NULL) {
1111                        ALOGE("%s: Error: buffer is already set", __func__);
1112                    } else {
1113                        j->buffer = (camera3_stream_buffer_t *)malloc(
1114                                sizeof(camera3_stream_buffer_t));
1115                        *(j->buffer) = *buffer;
1116                        ALOGV("%s: cache buffer %p at result frame_number %d",
1117                                __func__, buffer, frame_number);
1118                    }
1119                }
1120            }
1121        }
1122    }
1123
1124    pthread_mutex_unlock(&mRequestLock);
1125    return;
1126}
1127
1128/*===========================================================================
1129 * FUNCTION   : translateCbMetadataToResultMetadata
1130 *
1131 * DESCRIPTION:
1132 *
1133 * PARAMETERS :
1134 *   @metadata : metadata information from callback
1135 *
1136 * RETURN     : camera_metadata_t*
1137 *              metadata in a format specified by fwk
1138 *==========================================================================*/
1139camera_metadata_t*
1140QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1141                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1142                                 int32_t request_id)
1143{
1144    CameraMetadata camMetadata;
1145    camera_metadata_t* resultMetadata;
1146
1147    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1148    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1149
1150    /*CAM_INTF_META_HISTOGRAM - TODO*/
1151    /*cam_hist_stats_t  *histogram =
1152      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1153      metadata);*/
1154
1155    /*face detection*/
1156    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1157        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1158    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1159    int32_t faceIds[numFaces];
1160    uint8_t faceScores[numFaces];
1161    int32_t faceRectangles[numFaces * 4];
1162    int32_t faceLandmarks[numFaces * 6];
1163    int j = 0, k = 0;
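    // j indexes the flattened face rectangles (4 entries per face), k the landmark arrays (6 entries per face).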
1164    for (int i = 0; i < numFaces; i++) {
1165        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1166        faceScores[i] = faceDetectionInfo->faces[i].score;
1167        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1168                faceRectangles+j, -1);
1169        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1170        j+= 4;
1171        k+= 6;
1172    }
1173    if (numFaces > 0) {
1174        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1175        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1176        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1177            faceRectangles, numFaces*4);
1178        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1179            faceLandmarks, numFaces*6);
1180    }
1181
1182    uint8_t  *color_correct_mode =
1183        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1184    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1185
1186    int32_t  *ae_precapture_id =
1187        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1188    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1189
1190    /*aec regions*/
1191    cam_area_t  *hAeRegions =
1192        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1193    int32_t aeRegions[5];
1194    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1195    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1196
1197    uint8_t  *ae_state =
1198        (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1199    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1200
1201    uint8_t  *focusMode =
1202        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1203    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1204
1205    /*af regions*/
1206    cam_area_t  *hAfRegions =
1207        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1208    int32_t afRegions[5];
1209    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1210    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1211
1212    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1213    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1214
1215    int32_t  *afTriggerId =
1216        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1217    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1218
1219    uint8_t  *whiteBalance =
1220        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1221    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1222
1223    /*awb regions*/
1224    cam_area_t  *hAwbRegions =
1225        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1226    int32_t awbRegions[5];
1227    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1228    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1229
1230    uint8_t  *whiteBalanceState =
1231        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1232    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1233
1234    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1235    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1236
1237    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
1238    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1239
1240    uint8_t  *flashPower =
1241        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1242    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1243
1244    int64_t  *flashFiringTime =
1245        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1246    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1247
1248    /*int32_t  *ledMode =
1249      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1250      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1251
1252    uint8_t  *flashState =
1253        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1254    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1255
1256    uint8_t  *hotPixelMode =
1257        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1258    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1259
1260    float  *lensAperture =
1261        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1262    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1263
1264    float  *filterDensity =
1265        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1266    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1267
1268    float  *focalLength =
1269        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1270    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1271
1272    float  *focusDistance =
1273        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1274    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1275
1276    float  *focusRange =
1277        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1278    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1279
1280    uint8_t  *opticalStab =
1281        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1282    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1283
1284    /*int32_t  *focusState =
1285      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1286      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1287
1288    uint8_t  *noiseRedMode =
1289        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1290    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1291
1292    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1293
1294    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1295        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1296    int32_t scalerCropRegion[4];
1297    scalerCropRegion[0] = hScalerCropRegion->left;
1298    scalerCropRegion[1] = hScalerCropRegion->top;
1299    scalerCropRegion[2] = hScalerCropRegion->width;
1300    scalerCropRegion[3] = hScalerCropRegion->height;
1301    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1302
1303    int64_t  *sensorExpTime =
1304        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1305    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1306
1307    int64_t  *sensorFrameDuration =
1308        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1309    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
1310
1311    int32_t  *sensorSensitivity =
1312        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1313    mMetadataResponse.iso_speed = *sensorSensitivity;
1314    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1315
1316    uint8_t  *shadingMode =
1317        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1318    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1319
1320    uint8_t  *faceDetectMode =
1321        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1322    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
1323
1324    uint8_t  *histogramMode =
1325        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1326    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1327
1328    uint8_t  *sharpnessMapMode =
1329        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1330    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1331            sharpnessMapMode, 1);
1332
1333    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1334    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1335        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1336    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1337            (int32_t*)sharpnessMap->sharpness,
1338            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1339
1340    resultMetadata = camMetadata.release();
1341    return resultMetadata;
1342}
1343
1344/*===========================================================================
1345 * FUNCTION   : convertToRegions
1346 *
1347 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1348 *
1349 * PARAMETERS :
1350 *   @rect   : cam_rect_t struct to convert
1351 *   @region : int32_t destination array
1352 *   @weight : if we are converting from cam_area_t, weight is valid
1353 *             else weight = -1
1354 *
1355 *==========================================================================*/
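// Output layout is {x_min, y_min, x_max, y_max, weight}, matching the framework's 5-element region arrays; weight is omitted when -1.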
1356void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1357    region[0] = rect.left;
1358    region[1] = rect.top;
1359    region[2] = rect.left + rect.width;
1360    region[3] = rect.top + rect.height;
1361    if (weight > -1) {
1362        region[4] = weight;
1363    }
1364}
1365
1366/*===========================================================================
1367 * FUNCTION   : convertFromRegions
1368 *
1369 * DESCRIPTION: helper method to convert a metadata region entry of the form
1370 *              [x_min, y_min, x_max, y_max, weight] into cam_area_t
1371 *
1372 * PARAMETERS :
1373 *   @roi      : destination cam_area_t struct
1374 *   @settings : frame settings from the framework
1375 *   @tag      : metadata tag of the region entry to read
1376 *
1377 *==========================================================================*/
1378void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1379                                                   const camera_metadata_t *settings,
1380                                                   uint32_t tag){
1381    CameraMetadata frame_settings;
1382    frame_settings = settings;
1383    int32_t x_min = frame_settings.find(tag).data.i32[0];
1384    int32_t y_min = frame_settings.find(tag).data.i32[1];
1385    int32_t x_max = frame_settings.find(tag).data.i32[2];
1386    int32_t y_max = frame_settings.find(tag).data.i32[3];
1387    roi->weight = frame_settings.find(tag).data.i32[4];
1388    roi->rect.left = x_min;
1389    roi->rect.top = y_min;
1390    roi->rect.width = x_max - x_min;
1391    roi->rect.height = y_max - y_min;
1392}
1393
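/* Illustrative usage (comment only, not part of the HAL build): reading an
 * AF region sent by the framework back into a cam_area_t, assuming the
 * request carries a five-element ANDROID_CONTROL_AF_REGIONS entry:
 *
 *   cam_area_t afRoi;
 *   convertFromRegions(&afRoi, settings, ANDROID_CONTROL_AF_REGIONS);
 *   // afRoi.rect holds {x_min, y_min, x_max - x_min, y_max - y_min};
 *   // afRoi.weight holds the fifth array element.
 */
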
1394/*===========================================================================
1395 * FUNCTION   : resetIfNeededROI
1396 *
1397 * DESCRIPTION: helper method to clamp the roi to the scaler crop region;
1398 *              returns false if the roi lies entirely outside the crop region
1399 *
1400 * PARAMETERS :
1401 *   @roi       : cam_area_t struct to resize
1402 *   @scalerCropRegion : cam_crop_region_t region to compare against
1403 *
1404 *
1405 *==========================================================================*/
1406bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1407                                                 const cam_crop_region_t* scalerCropRegion)
1408{
1409    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1410    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1411    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
1412    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
1413    if ((roi_x_max < scalerCropRegion->left) ||
1414        (roi_y_max < scalerCropRegion->top)  ||
1415        (roi->rect.left > crop_x_max) ||
1416        (roi->rect.top > crop_y_max)){
1417        return false;
1418    }
1419    if (roi->rect.left < scalerCropRegion->left) {
1420        roi->rect.left = scalerCropRegion->left;
1421    }
1422    if (roi->rect.top < scalerCropRegion->top) {
1423        roi->rect.top = scalerCropRegion->top;
1424    }
1425    if (roi_x_max > crop_x_max) {
1426        roi_x_max = crop_x_max;
1427    }
1428    if (roi_y_max > crop_y_max) {
1429        roi_y_max = crop_y_max;
1430    }
1431    roi->rect.width = roi_x_max - roi->rect.left;
1432    roi->rect.height = roi_y_max - roi->rect.top;
1433    return true;
1434}
1435
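/* Worked example (comment only, hypothetical numbers): with a scaler crop
 * region of {left=0, top=0, width=1920, height=1080} and an ROI of
 * {left=1800, top=900, width=400, height=400}, resetIfNeededROI() clamps the
 * ROI to {left=1800, top=900, width=120, height=180} and returns true; an
 * ROI that lies entirely outside the crop region is left unchanged and the
 * function returns false.
 */
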
1436/*===========================================================================
1437 * FUNCTION   : convertLandmarks
1438 *
1439 * DESCRIPTION: helper method to extract the landmarks from face detection info
1440 *
1441 * PARAMETERS :
1442 *   @face   : cam_face_detection_info_t struct to extract landmarks from
1443 *   @landmarks : int32_t destination array
1444 *
1445 *
1446 *==========================================================================*/
1447void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1448{
1449    landmarks[0] = face.left_eye_center.x;
1450    landmarks[1] = face.left_eye_center.y;
1451    landmarks[2] = face.right_eye_center.x;
1452    landmarks[3] = face.right_eye_center.y;
1453    landmarks[4] = face.mouth_center.x;
1454    landmarks[5] = face.mouth_center.y;
1455}
1456
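/* Resulting layout (comment only): landmarks[] holds
 * [leftEye.x, leftEye.y, rightEye.x, rightEye.y, mouth.x, mouth.y],
 * matching the ANDROID_STATISTICS_FACE_LANDMARKS ordering.
 */
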
1457#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1458/*===========================================================================
1459 * FUNCTION   : initCapabilities
1460 *
1461 * DESCRIPTION: initialize camera capabilities in static data struct
1462 *
1463 * PARAMETERS :
1464 *   @cameraId  : camera Id
1465 *
1466 * RETURN     : int32_t type of status
1467 *              NO_ERROR  -- success
1468 *              non-zero failure code
1469 *==========================================================================*/
1470int QCamera3HardwareInterface::initCapabilities(int cameraId)
1471{
1472    int rc = 0;
1473    mm_camera_vtbl_t *cameraHandle = NULL;
1474    QCamera3HeapMemory *capabilityHeap = NULL;
1475
1476    cameraHandle = camera_open(cameraId);
1477    if (!cameraHandle) {
1478        ALOGE("%s: camera_open failed", __func__);
1479        rc = -1;
1480        goto open_failed;
1481    }
1482
1483    capabilityHeap = new QCamera3HeapMemory();
1484    if (capabilityHeap == NULL) {
1485        ALOGE("%s: creation of capabilityHeap failed", __func__);
1486        goto heap_creation_failed;
1487    }
1488    /* Allocate memory for capability buffer */
1489    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1490    if(rc != OK) {
1491        ALOGE("%s: No memory for cappability", __func__);
1492        goto allocate_failed;
1493    }
1494
1495    /* Map memory for capability buffer */
1496    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1497    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1498                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1499                                capabilityHeap->getFd(0),
1500                                sizeof(cam_capability_t));
1501    if(rc < 0) {
1502        ALOGE("%s: failed to map capability buffer", __func__);
1503        goto map_failed;
1504    }
1505
1506    /* Query Capability */
1507    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1508    if(rc < 0) {
1509        ALOGE("%s: failed to query capability",__func__);
1510        goto query_failed;
1511    }
1512    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1513    if (!gCamCapability[cameraId]) {
1514        ALOGE("%s: out of memory", __func__);
1515        goto query_failed;
1516    }
1517    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1518                                        sizeof(cam_capability_t));
1519    rc = 0;
1520
1521query_failed:
1522    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1523                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1524map_failed:
1525    capabilityHeap->deallocate();
1526allocate_failed:
1527    delete capabilityHeap;
1528heap_creation_failed:
1529    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1530    cameraHandle = NULL;
1531open_failed:
1532    return rc;
1533}
1534
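/* Note (comment only): the labels above fall through in reverse order of
 * acquisition, so both the success path and any failure path unmap the
 * capability buffer, deallocate and delete the heap, and close the camera
 * handle before returning rc; the queried capabilities survive only in the
 * gCamCapability[cameraId] copy made above.
 */
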
1535/*===========================================================================
1536 * FUNCTION   : initParameters
1537 *
1538 * DESCRIPTION: initialize camera parameters
1539 *
1540 * PARAMETERS :
1541 *
1542 * RETURN     : int32_t type of status
1543 *              NO_ERROR  -- success
1544 *              non-zero failure code
1545 *==========================================================================*/
1546int QCamera3HardwareInterface::initParameters()
1547{
1548    int rc = 0;
1549
1550    //Allocate Set Param Buffer
1551    mParamHeap = new QCamera3HeapMemory();
1552    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1553    if(rc != OK) {
1554        rc = NO_MEMORY;
1555        ALOGE("Failed to allocate SETPARM Heap memory");
1556        delete mParamHeap;
1557        mParamHeap = NULL;
1558        return rc;
1559    }
1560
1561    //Map memory for parameters buffer
1562    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1563            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1564            mParamHeap->getFd(0),
1565            sizeof(parm_buffer_t));
1566    if(rc < 0) {
1567        ALOGE("%s:failed to map SETPARM buffer",__func__);
1568        rc = FAILED_TRANSACTION;
1569        mParamHeap->deallocate();
1570        delete mParamHeap;
1571        mParamHeap = NULL;
1572        return rc;
1573    }
1574
1575    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1576    return rc;
1577}
1578
1579/*===========================================================================
1580 * FUNCTION   : deinitParameters
1581 *
1582 * DESCRIPTION: de-initialize camera parameters
1583 *
1584 * PARAMETERS :
1585 *
1586 * RETURN     : NONE
1587 *==========================================================================*/
1588void QCamera3HardwareInterface::deinitParameters()
1589{
1590    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1591            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1592
1593    mParamHeap->deallocate();
1594    delete mParamHeap;
1595    mParamHeap = NULL;
1596
1597    mParameters = NULL;
1598}
1599
1600/*===========================================================================
1601 * FUNCTION   : calcMaxJpegSize
1602 *
1603 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1604 *
1605 * PARAMETERS :
1606 *
1607 * RETURN     : max_jpeg_size
1608 *==========================================================================*/
1609int QCamera3HardwareInterface::calcMaxJpegSize()
1610{
1611    int32_t max_jpeg_size = 0;
1612    int temp_width, temp_height;
1613    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1614        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1615        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1616        if (temp_width * temp_height > max_jpeg_size ) {
1617            max_jpeg_size = temp_width * temp_height;
1618        }
1619    }
1620    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1621    return max_jpeg_size;
1622}
1623
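/* Worked example (comment only, hypothetical sensor): if the largest picture
 * size is 4208x3120, then max_jpeg_size = 4208 * 3120 * 3/2 (= 19,693,440)
 * plus sizeof(camera3_jpeg_blob_t); i.e. the worst-case YUV420 footprint of
 * the largest picture size is used as an upper bound for the compressed JPEG
 * buffer.
 */
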
1624/*===========================================================================
1625 * FUNCTION   : initStaticMetadata
1626 *
1627 * DESCRIPTION: initialize the static metadata
1628 *
1629 * PARAMETERS :
1630 *   @cameraId  : camera Id
1631 *
1632 * RETURN     : int32_t type of status
1633 *              0  -- success
1634 *              non-zero failure code
1635 *==========================================================================*/
1636int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1637{
1638    int rc = 0;
1639    CameraMetadata staticInfo;
1640    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1641    /*HAL 3 only*/
1642    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1643                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1644
1645    /*hard coded for now but this should come from sensor*/
1646    float min_focus_distance;
1647    if(facingBack){
1648        min_focus_distance = 10;
1649    } else {
1650        min_focus_distance = 0;
1651    }
1652    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1653                    &min_focus_distance, 1);
1654
1655    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1656                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1657
1658    /*should be using focal lengths but sensor doesn't provide that info now*/
1659    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1660                      &gCamCapability[cameraId]->focal_length,
1661                      1);
1662
1663    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1664                      gCamCapability[cameraId]->apertures,
1665                      gCamCapability[cameraId]->apertures_count);
1666
1667    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1668                gCamCapability[cameraId]->filter_densities,
1669                gCamCapability[cameraId]->filter_densities_count);
1670
1671
1672    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1673                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1674                      gCamCapability[cameraId]->optical_stab_modes_count);
1675
1676    staticInfo.update(ANDROID_LENS_POSITION,
1677                      gCamCapability[cameraId]->lens_position,
1678                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1679
1680    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1681                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1682    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1683                      lens_shading_map_size,
1684                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1685
1686    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map,
1687            sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float));
1688
1689    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1690                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1691    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1692            geo_correction_map_size,
1693            sizeof(geo_correction_map_size)/sizeof(int32_t));
1694
1695    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1696                       gCamCapability[cameraId]->geo_correction_map,
1697                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1698
1699    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1700            gCamCapability[cameraId]->sensor_physical_size, 2);
1701
1702    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1703            gCamCapability[cameraId]->exposure_time_range, 2);
1704
1705    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1706            &gCamCapability[cameraId]->max_frame_duration, 1);
1707
1708
1709    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1710                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1711
1712    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1713                                               gCamCapability[cameraId]->pixel_array_size.height};
1714    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1715                      pixel_array_size, 2);
1716
1717    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.width,
1718                                                gCamCapability[cameraId]->active_array_size.height};
1719
1720    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1721                      active_array_size, 2);
1722
1723    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1724            &gCamCapability[cameraId]->white_level, 1);
1725
1726    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1727            gCamCapability[cameraId]->black_level_pattern, 4);
1728
1729    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1730                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1731
1732    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1733                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1734
1735    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1736                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1737    /*hardcode 0 for now*/
1738    int32_t max_face_count = 0;
1739    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1740                      &max_face_count, 1);
1741
1742    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1743                      &gCamCapability[cameraId]->histogram_size, 1);
1744
1745    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1746            &gCamCapability[cameraId]->max_histogram_count, 1);
1747
1748    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1749                                                gCamCapability[cameraId]->sharpness_map_size.height};
1750
1751    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1752            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1753
1754    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1755            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1756
1757
1758    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1759                      &gCamCapability[cameraId]->raw_min_duration,
1760                       1);
1761
1762    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888};
1763    int scalar_formats_count = 1;
1764    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1765                      scalar_formats,
1766                      scalar_formats_count);
1767
1768    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1769    makeTable(gCamCapability[cameraId]->supported_sizes_tbl,
1770              gCamCapability[cameraId]->supported_sizes_tbl_cnt,
1771              available_processed_sizes);
1772    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1773                available_processed_sizes,
1774                (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2);
1775
1776    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1777    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1778                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1779                 available_fps_ranges);
1780    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1781            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1782
1783    camera_metadata_rational exposureCompensationStep = {
1784            gCamCapability[cameraId]->exp_compensation_step.numerator,
1785            gCamCapability[cameraId]->exp_compensation_step.denominator};
1786    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1787                      &exposureCompensationStep, 1);
1788
1789    /*TO DO*/
1790    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1791    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1792                      availableVstabModes, sizeof(availableVstabModes));
1793
1794    /*HAL 1 and HAL 3 common*/
1795    float maxZoom = 4;
1796    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1797            &maxZoom, 1);
1798
1799    int32_t max3aRegions = 1;
1800    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1801            &max3aRegions, 1);
1802
1803    uint8_t availableFaceDetectModes[] = {
1804            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1805    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1806                      availableFaceDetectModes,
1807                      sizeof(availableFaceDetectModes));
1808
1809    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1810                                       gCamCapability[cameraId]->raw_dim.height};
1811    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1812                      raw_size,
1813                      sizeof(raw_size)/sizeof(int32_t));
1814
1815    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1816                                                        gCamCapability[cameraId]->exposure_compensation_max};
1817    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1818            exposureCompensationRange,
1819            sizeof(exposureCompensationRange)/sizeof(int32_t));
1820
1821    uint8_t lensFacing = (facingBack) ?
1822            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1823    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1824
1825    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
1826    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1827              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1828              available_jpeg_sizes);
1829    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1830                available_jpeg_sizes,
1831                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1832
1833    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1834                      available_thumbnail_sizes,
1835                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1836
1837    int32_t max_jpeg_size = 0;
1838    int temp_width, temp_height;
1839    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1840        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1841        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1842        if (temp_width * temp_height > max_jpeg_size ) {
1843            max_jpeg_size = temp_width * temp_height;
1844        }
1845    }
1846    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1847    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1848                      &max_jpeg_size, 1);
1849
1850    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1851    int32_t size = 0;
1852    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1853        int val = lookupFwkName(EFFECT_MODES_MAP,
1854                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1855                                   gCamCapability[cameraId]->supported_effects[i]);
1856        if (val != NAME_NOT_FOUND) {
1857            avail_effects[size] = (uint8_t)val;
1858            size++;
1859        }
1860    }
1861    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1862                      avail_effects,
1863                      size);
1864
1865    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1866    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1867    int32_t supported_scene_modes_cnt = 0;
1868    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1869        int val = lookupFwkName(SCENE_MODES_MAP,
1870                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1871                                gCamCapability[cameraId]->supported_scene_modes[i]);
1872        if (val != NAME_NOT_FOUND) {
1873            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1874            supported_indexes[supported_scene_modes_cnt] = i;
1875            supported_scene_modes_cnt++;
1876        }
1877    }
1878
1879    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1880                      avail_scene_modes,
1881                      supported_scene_modes_cnt);
1882
1883    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1884    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1885                      supported_scene_modes_cnt,
1886                      scene_mode_overrides,
1887                      supported_indexes,
1888                      cameraId);
1889    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
1890                      scene_mode_overrides,
1891                      supported_scene_modes_cnt*3);
1892
1893    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
1894    size = 0;
1895    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
1896        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
1897                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
1898                                 gCamCapability[cameraId]->supported_antibandings[i]);
1899        if (val != NAME_NOT_FOUND) {
1900            avail_antibanding_modes[size] = (uint8_t)val;
1901            size++;
1902        }
1903
1904    }
1905    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1906                      avail_antibanding_modes,
1907                      size);
1908
1909    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
1910    size = 0;
1911    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
1912        int val = lookupFwkName(FOCUS_MODES_MAP,
1913                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
1914                                gCamCapability[cameraId]->supported_focus_modes[i]);
1915        if (val != NAME_NOT_FOUND) {
1916            avail_af_modes[size] = (uint8_t)val;
1917            size++;
1918        }
1919    }
1920    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1921                      avail_af_modes,
1922                      size);
1923
1924    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
1925    size = 0;
1926    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
1927        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1928                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1929                                    gCamCapability[cameraId]->supported_white_balances[i]);
1930        if (val != NAME_NOT_FOUND) {
1931            avail_awb_modes[size] = (uint8_t)val;
1932            size++;
1933        }
1934    }
1935    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1936                      avail_awb_modes,
1937                      size);
1938
1939    uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX];
1940    size = 0;
1941    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) {
1942        int val = lookupFwkName(FLASH_MODES_MAP,
1943                                sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
1944                                gCamCapability[cameraId]->supported_flash_modes[i]);
1945        if (val != NAME_NOT_FOUND) {
1946            avail_flash_modes[size] = (uint8_t)val;
1947            size++;
1948        }
1949    }
1950    uint8_t flashAvailable = 0;
1951    if (size > 1) {
1952        //flash is supported
1953        flashAvailable = 1;
1954    }
1955    staticInfo.update(ANDROID_FLASH_MODE,
1956                      avail_flash_modes,
1957                      size);
1958
1959    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
1960            &flashAvailable, 1);
1961
1962    uint8_t avail_ae_modes[5];
1963    size = 0;
1964    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
1965        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
1966        size++;
1967    }
1968    if (flashAvailable) {
1969        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
1970        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
1971        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
1972    }
1973    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1974                      avail_ae_modes,
1975                      size);
1976    size = 0;
1977    int32_t avail_sensitivities[CAM_ISO_MODE_MAX];
1978    for (int i = 0; i < gCamCapability[cameraId]->supported_iso_modes_cnt; i++) {
1979        int32_t sensitivity = getSensorSensitivity(gCamCapability[cameraId]->supported_iso_modes[i]);
1980        if (sensitivity != -1) {
1981            avail_sensitivities[size] = sensitivity;
1982            size++;
1983        }
1984    }
1985    staticInfo.update(ANDROID_SENSOR_INFO_AVAILABLE_SENSITIVITIES,
1986                      avail_sensitivities,
1987                      size);
1988
1989    gStaticMetadata[cameraId] = staticInfo.release();
1990    return rc;
1991}
1992
1993/*===========================================================================
1994 * FUNCTION   : makeTable
1995 *
1996 * DESCRIPTION: make a table of sizes
1997 *
1998 * PARAMETERS :
1999 *   @dimTable  : source table of @size cam_dimension_t entries
2000 *   @sizeTable : destination int32_t array of [width, height] pairs
2001 *==========================================================================*/
2002void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2003                                          int32_t* sizeTable)
2004{
2005    int j = 0;
2006    for (int i = 0; i < size; i++) {
2007        sizeTable[j] = dimTable[i].width;
2008        sizeTable[j+1] = dimTable[i].height;
2009        j+=2;
2010    }
2011}
2012
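/* Illustrative input/output (comment only, hypothetical values):
 * dimTable = {{1920,1080},{1280,720}} with size = 2 yields
 * sizeTable = {1920, 1080, 1280, 720}.
 */
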
2013/*===========================================================================
2014 * FUNCTION   : makeFPSTable
2015 *
2016 * DESCRIPTION: make a table of fps ranges
2017 *
2018 * PARAMETERS :
2019 *   @fpsTable : source table of @size fps ranges; @fpsRangesTable : flattened [min_fps, max_fps] pairs
2020 *==========================================================================*/
2021void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2022                                          int32_t* fpsRangesTable)
2023{
2024    int j = 0;
2025    for (int i = 0; i < size; i++) {
2026        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2027        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2028        j+=2;
2029    }
2030}
2031
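/* Illustrative input/output (comment only, hypothetical values):
 * fpsTable = {{15.0,30.0},{30.0,30.0}} with size = 2 yields
 * fpsRangesTable = {15, 30, 30, 30}.
 */
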
2032/*===========================================================================
2033 * FUNCTION   : makeOverridesList
2034 *
2035 * DESCRIPTION: make a list of scene mode overrides
2036 *
2037 * PARAMETERS :
2038 *   @overridesTable : daemon overrides table, indexed via @supported_indexes
2039 *   @overridesList  : destination list of @size [ae, awb, af] triplets
2040 *==========================================================================*/
2041void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2042                                                  uint8_t size, uint8_t* overridesList,
2043                                                  uint8_t* supported_indexes,
2044                                                  int camera_id)
2045{
2046    /*daemon will give a list of overrides for all scene modes.
2047      However we should send the fwk only the overrides for the scene modes
2048      supported by the framework*/
2049    int j = 0, index = 0, supt = 0;
2050    uint8_t focus_override;
2051    for (int i = 0; i < size; i++) {
2052        supt = 0;
2053        index = supported_indexes[i];
2054        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
2055        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2056                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2057                                                    overridesTable[index].awb_mode);
2058        focus_override = (uint8_t)overridesTable[index].af_mode;
2059        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2060           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2061              supt = 1;
2062              break;
2063           }
2064        }
2065        if (supt) {
2066           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2067                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2068                                              focus_override);
2069        } else {
2070           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2071        }
2072        j+=3;
2073    }
2074}
2075
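/* Resulting layout (comment only): overridesList holds one [ae, awb, af]
 * triplet per framework-visible scene mode, in the same order as
 * ANDROID_CONTROL_AVAILABLE_SCENE_MODES, with the af entry falling back to
 * ANDROID_CONTROL_AF_MODE_OFF when the sensor does not support the daemon's
 * focus override.
 */
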
2076/*===========================================================================
2077 * FUNCTION   : getScalarFormat
2078 *
2079 * DESCRIPTION: convert the format to type recognized by framework
2080 *
2081 * PARAMETERS : format : the format from backend
2082 *
2083 * RETURN     : format recognized by framework
2084 *
2085 *==========================================================================*/
2086int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2087{
2088    int32_t halPixelFormat;
2089
2090    switch (format) {
2091    case CAM_FORMAT_YUV_420_NV12:
2092        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2093        break;
2094    case CAM_FORMAT_YUV_420_NV21:
2095        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2096        break;
2097    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2098        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2099        break;
2100    case CAM_FORMAT_YUV_420_YV12:
2101        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2102        break;
2103    case CAM_FORMAT_YUV_422_NV16:
2104    case CAM_FORMAT_YUV_422_NV61:
2105    default:
2106        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2107        break;
2108    }
2109    return halPixelFormat;
2110}
2111
2112/*===========================================================================
2113 * FUNCTION   : getSensorSensitivity
2114 *
2115 * DESCRIPTION: convert iso_mode to an integer value
2116 *
2117 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2118 *
2119 * RETURN     : sensitivity supported by sensor
2120 *
2121 *==========================================================================*/
2122int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2123{
2124    int32_t sensitivity;
2125
2126    switch (iso_mode) {
2127    case CAM_ISO_MODE_100:
2128        sensitivity = 100;
2129        break;
2130    case CAM_ISO_MODE_200:
2131        sensitivity = 200;
2132        break;
2133    case CAM_ISO_MODE_400:
2134        sensitivity = 400;
2135        break;
2136    case CAM_ISO_MODE_800:
2137        sensitivity = 800;
2138        break;
2139    case CAM_ISO_MODE_1600:
2140        sensitivity = 1600;
2141        break;
2142    default:
2143        sensitivity = -1;
2144        break;
2145    }
2146    return sensitivity;
2147}
2148
2149
2150/*===========================================================================
2151 * FUNCTION   : AddSetParmEntryToBatch
2152 *
2153 * DESCRIPTION: add set parameter entry into batch
2154 *
2155 * PARAMETERS :
2156 *   @p_table     : ptr to parameter buffer
2157 *   @paramType   : parameter type
2158 *   @paramLength : length of parameter value
2159 *   @paramValue  : ptr to parameter value
2160 *
2161 * RETURN     : int32_t type of status
2162 *              NO_ERROR  -- success
2163 *              non-zero failure code
2164 *==========================================================================*/
2165int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2166                                                          cam_intf_parm_type_t paramType,
2167                                                          uint32_t paramLength,
2168                                                          void *paramValue)
2169{
2170    int position = paramType;
2171    int current, next;
2172
2173    /*************************************************************************
2174    *                 Code to take care of linking next flags                *
2175    *************************************************************************/
2176    current = GET_FIRST_PARAM_ID(p_table);
2177    if (position == current){
2178        //DO NOTHING
2179    } else if (position < current){
2180        SET_NEXT_PARAM_ID(position, p_table, current);
2181        SET_FIRST_PARAM_ID(p_table, position);
2182    } else {
2183        /* Search for the position in the linked list where we need to slot in*/
2184        while (position > GET_NEXT_PARAM_ID(current, p_table))
2185            current = GET_NEXT_PARAM_ID(current, p_table);
2186
2187        /*If node already exists no need to alter linking*/
2188        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2189            next = GET_NEXT_PARAM_ID(current, p_table);
2190            SET_NEXT_PARAM_ID(current, p_table, position);
2191            SET_NEXT_PARAM_ID(position, p_table, next);
2192        }
2193    }
2194
2195    /*************************************************************************
2196    *                   Copy contents into entry                             *
2197    *************************************************************************/
2198
2199    if (paramLength > sizeof(parm_type_t)) {
2200        ALOGE("%s:Size of input larger than max entry size",__func__);
2201        return BAD_VALUE;
2202    }
2203    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2204    return NO_ERROR;
2205}
2206
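/* Worked example (comment only; the parameter IDs, lengths and values below
 * are hypothetical placeholders): starting from an empty batch
 * (first_flagged_entry == CAM_INTF_PARM_MAX), adding IDs 7, 3 and 5 in that
 * order leaves the linked list threaded in ascending ID order:
 *
 *   AddSetParmEntryToBatch(p, (cam_intf_parm_type_t)7, len7, &v7);
 *   AddSetParmEntryToBatch(p, (cam_intf_parm_type_t)3, len3, &v3);
 *   AddSetParmEntryToBatch(p, (cam_intf_parm_type_t)5, len5, &v5);
 *   // GET_FIRST_PARAM_ID(p) == 3, next(3) == 5, next(5) == 7
 */
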
2207/*===========================================================================
2208 * FUNCTION   : lookupFwkName
2209 *
2210 * DESCRIPTION: In case the enum is not the same in fwk and backend,
2211 *              make sure the parameter is correctly propagated
2212 *
2213 * PARAMETERS  :
2214 *   @arr      : map between the two enums
2215 *   @len      : len of the map
2216 *   @hal_name : name of the hal_parm to map
2217 *
2218 * RETURN     : int type of status
2219 *              fwk_name  -- success
2220 *              NAME_NOT_FOUND -- no matching framework type
2221 *==========================================================================*/
2222int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2223                                             int len, int hal_name)
2224{
2225
2226    for (int i = 0; i < len; i++) {
2227        if (arr[i].hal_name == hal_name)
2228            return arr[i].fwk_name;
2229    }
2230
2231    /* Not being able to find a matching framework type is not necessarily
2232     * an error case. This happens when mm-camera supports more attributes
2233     * than the framework does */
2234    ALOGD("%s: Cannot find matching framework type", __func__);
2235    return NAME_NOT_FOUND;
2236}
2237
2238/*===========================================================================
2239 * FUNCTION   : lookupHalName
2240 *
2241 * DESCRIPTION: In case the enum is not the same in fwk and backend,
2242 *              make sure the parameter is correctly propagated
2243 *
2244 * PARAMETERS  :
2245 *   @arr      : map between the two enums
2246 *   @len      : len of the map
2247 *   @fwk_name : name of the fwk_parm to map
2248 *
2249 * RETURN     : int32_t type of status
2250 *              hal_name  -- success
2251 *              NAME_NOT_FOUND -- no matching hal type
2252 *==========================================================================*/
2253int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2254                                             int len, int fwk_name)
2255{
2256    for (int i = 0; i < len; i++) {
2257       if (arr[i].fwk_name == fwk_name)
2258           return arr[i].hal_name;
2259    }
2260    ALOGE("%s: Cannot find matching hal type", __func__);
2261    return NAME_NOT_FOUND;
2262}
2263
2264/*===========================================================================
2265 * FUNCTION   : getCamInfo
2266 *
2267 * DESCRIPTION: query camera capabilities
2268 *
2269 * PARAMETERS :
2270 *   @cameraId  : camera Id
2271 *   @info      : camera info struct to be filled in with camera capabilities
2272 *
2273 * RETURN     : int32_t type of status
2274 *              NO_ERROR  -- success
2275 *              non-zero failure code
2276 *==========================================================================*/
2277int QCamera3HardwareInterface::getCamInfo(int cameraId,
2278                                    struct camera_info *info)
2279{
2280    int rc = 0;
2281
2282    if (NULL == gCamCapability[cameraId]) {
2283        rc = initCapabilities(cameraId);
2284        if (rc < 0) {
2285            //pthread_mutex_unlock(&g_camlock);
2286            return rc;
2287        }
2288    }
2289
2290    if (NULL == gStaticMetadata[cameraId]) {
2291        rc = initStaticMetadata(cameraId);
2292        if (rc < 0) {
2293            return rc;
2294        }
2295    }
2296
2297    switch(gCamCapability[cameraId]->position) {
2298    case CAM_POSITION_BACK:
2299        info->facing = CAMERA_FACING_BACK;
2300        break;
2301
2302    case CAM_POSITION_FRONT:
2303        info->facing = CAMERA_FACING_FRONT;
2304        break;
2305
2306    default:
2307        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2308        rc = -1;
2309        break;
2310    }
2311
2312
2313    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2314    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2315    info->static_camera_characteristics = gStaticMetadata[cameraId];
2316
2317    return rc;
2318}
2319
2320/*===========================================================================
2321 * FUNCTION   : translateCapabilityToMetadata
2322 *
2323 * DESCRIPTION: translate camera capabilities into default request settings
2324 *
2325 * PARAMETERS :
2326 *   @type : capture request template type (CAMERA3_TEMPLATE_*)
2327 *
2328 * RETURN     : success: camera_metadata_t*
2329 *              failure: NULL
2330 *
2331 *==========================================================================*/
2332camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2333{
2334    pthread_mutex_lock(&mMutex);
2335
2336    if (mDefaultMetadata[type] != NULL) {
2337        pthread_mutex_unlock(&mMutex);
2338        return mDefaultMetadata[type];
2339    }
2340    //first time we are handling this request
2341    //fill up the metadata structure using the wrapper class
2342    CameraMetadata settings;
2343    //translate from cam_capability_t to camera_metadata_tag_t
2344    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2345    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2346
2347    /*control*/
2348
2349    uint8_t controlIntent = 0;
2350    switch (type) {
2351      case CAMERA3_TEMPLATE_PREVIEW:
2352        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2353        break;
2354      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2355        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2356        break;
2357      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2358        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2359        break;
2360      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2361        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2362        break;
2363      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2364        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2365        break;
2366      default:
2367        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2368        break;
2369    }
2370    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2371
2372    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2373            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2374
2375    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2376    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2377
2378    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2379    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2380
2381    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2382    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2383
2384    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2385    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2386
2387    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2388    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2389
2390    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2391    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2392
2393    static uint8_t focusMode;
2394    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2395        ALOGE("%s: Setting focus mode to auto", __func__);
2396        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2397    } else {
2398        ALOGE("%s: Setting focus mode to off", __func__);
2399        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2400    }
2401    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2402
2403    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2404    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2405
2406    /*flash*/
2407    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2408    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2409
2410
2411    /* lens */
2412    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2413    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2414
2415    if (gCamCapability[mCameraId]->filter_densities_count) {
2416        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2417        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2418                        1);
2419    }
2420
2421    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2422    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2423
2424    mDefaultMetadata[type] = settings.release();
2425
2426    pthread_mutex_unlock(&mMutex);
2427    return mDefaultMetadata[type];
2428}
2429
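/* Illustrative usage (comment only): the framework asks for one default
 * request per template; repeated calls for the same type return the cached
 * entry, e.g.
 *
 *   const camera_metadata_t *previewDefaults =
 *       translateCapabilityToMetadata(CAMERA3_TEMPLATE_PREVIEW);
 *   // a second call for CAMERA3_TEMPLATE_PREVIEW returns mDefaultMetadata[type]
 */
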
2430/*===========================================================================
2431 * FUNCTION   : setFrameParameters
2432 *
2433 * DESCRIPTION: set parameters per frame as requested in the metadata from
2434 *              framework
2435 *
2436 * PARAMETERS :
2437 *   @settings  : frame settings information from framework
2438 *
2439 *
2440 * RETURN     : success: NO_ERROR
2441 *              failure: non-zero error code
2442 *==========================================================================*/
2443int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2444                                                  const camera_metadata_t *settings)
2445{
2446    /*translate from camera_metadata_t type to parm_type_t*/
2447    int rc = 0;
2448    if (settings == NULL && mFirstRequest) {
2449        /*settings cannot be null for the first request*/
2450        return BAD_VALUE;
2451    }
2452
2453    int32_t hal_version = CAM_HAL_V3;
2454
2455    memset(mParameters, 0, sizeof(parm_buffer_t));
2456    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2457    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2458                sizeof(hal_version), &hal_version);
2459
2460    /*we need to update the frame number in the parameters*/
2461    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2462                                sizeof(frame_id), &frame_id);
2463    if (rc < 0) {
2464        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2465        return BAD_VALUE;
2466    }
2467
2468    if(settings != NULL){
2469        rc = translateMetadataToParameters(settings);
2470    }
2471    /*set the parameters to backend*/
2472    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2473    return rc;
2474}
2475
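/* Per-request flow (comment only; the call below is illustrative): the batch
 * is rebuilt from scratch for every capture request, so the backend receives
 * only the HAL version, the frame number and the tags the framework actually
 * set this frame, e.g.
 *
 *   rc = setFrameParameters(request->frame_number, request->settings);
 *   // NULL settings are allowed after the first request; omitted tags then
 *   // leave the previously applied backend state untouched.
 */
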
2476/*===========================================================================
2477 * FUNCTION   : translateMetadataToParameters
2478 *
2479 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2480 *
2481 *
2482 * PARAMETERS :
2483 *   @settings  : frame settings information from framework
2484 *
2485 *
2486 * RETURN     : success: NO_ERROR
2487 *              failure: non-zero error code
2488 *==========================================================================*/
2489int QCamera3HardwareInterface::translateMetadataToParameters
2490                                  (const camera_metadata_t *settings)
2491{
2492    int rc = 0;
2493    CameraMetadata frame_settings;
2494    frame_settings = settings;
2495
2496
2497    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2498        int32_t antibandingMode =
2499            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2500        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2501                sizeof(antibandingMode), &antibandingMode);
2502    }
2503
2504    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2505        int32_t expCompensation = frame_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2506        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2507          sizeof(expCompensation), &expCompensation);
2508    }
2509
2510    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2511        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2512        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2513                sizeof(aeLock), &aeLock);
2514    }
2515
2516    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2517        cam_fps_range_t fps_range;
2518        fps_range.min_fps =
2519            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2520        fps_range.max_fps =
2521            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2522        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2523                sizeof(fps_range), &fps_range);
2524    }
2525
2526    float focalDistance = -1.0;
2527    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2528        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2529        rc = AddSetParmEntryToBatch(mParameters,
2530                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2531                sizeof(focalDistance), &focalDistance);
2532    }
2533
2534    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2535        uint8_t fwk_focusMode =
2536            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2537        uint8_t focusMode;
2538        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2539            focusMode = CAM_FOCUS_MODE_INFINITY;
2540        } else {
2541            focusMode = lookupHalName(FOCUS_MODES_MAP,
2542                                      sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2543                                      fwk_focusMode);
2544        }
2545        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2546                sizeof(focusMode), &focusMode);
2547    }
2548
2549    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2550        uint8_t awbLock =
2551            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2552        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2553                sizeof(awbLock), &awbLock);
2554    }
2555
2556    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2557        uint8_t fwk_whiteLevel =
2558            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2559        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2560                sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2561                fwk_whiteLevel);
2562        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2563                sizeof(whiteLevel), &whiteLevel);
2564    }
2565
2566    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2567        uint8_t fwk_effectMode =
2568            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2569        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2570                sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2571                fwk_effectMode);
2572        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2573                sizeof(effectMode), &effectMode);
2574    }
2575
2576    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2577        uint8_t fwk_aeMode =
2578            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2579        uint8_t aeMode;
2580        int32_t redeye;
2581        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2582            aeMode = CAM_AE_MODE_OFF;
2583        } else {
2584            aeMode = CAM_AE_MODE_ON;
2585        }
2586        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2587            redeye = 1;
2588        } else {
2589            redeye = 0;
2590        }
2591        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2592                                          sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
2593                                          aeMode);
2594        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2595                sizeof(aeMode), &aeMode);
2596        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2597                sizeof(flashMode), &flashMode);
2598        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2599                sizeof(redeye), &redeye);
2600    }
2601
2602    if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) {
2603        int32_t metaFrameNumber =
2604            frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0];
2605        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2606                sizeof(metaFrameNumber), &metaFrameNumber);
2607    }
2608
2609    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2610        uint8_t colorCorrectMode =
2611            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2612        rc =
2613            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2614                    sizeof(colorCorrectMode), &colorCorrectMode);
2615    }
2616    cam_trigger_t aecTrigger;
2617    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2618    aecTrigger.trigger_id = -1;
2619    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2620        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2621        aecTrigger.trigger =
2622            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2623        aecTrigger.trigger_id =
2624            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2625    }
2626    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2627                                sizeof(aecTrigger), &aecTrigger);
2628
2629    /*af_trigger must come with a trigger id*/
2630    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2631        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2632        cam_trigger_t af_trigger;
2633        af_trigger.trigger =
2634            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2635        af_trigger.trigger_id =
2636            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2637        rc = AddSetParmEntryToBatch(mParameters,
2638                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2639    }
2640
2641    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2642        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2643        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2644                sizeof(metaMode), &metaMode);
2645        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2646           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2647           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2648                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2649                                             fwk_sceneMode);
2650           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2651                sizeof(sceneMode), &sceneMode);
2652        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
2653           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2654           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2655                sizeof(sceneMode), &sceneMode);
2656        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
2657           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2658           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2659                sizeof(sceneMode), &sceneMode);
2660        }
2661    }
2662
2663    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2664        int32_t demosaic =
2665            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2666        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2667                sizeof(demosaic), &demosaic);
2668    }
2669
2670    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2671        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2672        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE,
2673                sizeof(edgeMode), &edgeMode);
2674    }
2675
2676    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2677        int32_t edgeStrength =
2678            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2679        rc = AddSetParmEntryToBatch(mParameters,
2680                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2681    }
2682
2683    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2684        uint8_t flashMode =
2685            frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2686        rc = AddSetParmEntryToBatch(mParameters,
2687                CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode);
2688    }
2689
2690    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2691        uint8_t flashPower =
2692            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2693        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2694                sizeof(flashPower), &flashPower);
2695    }
2696
2697    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2698        int64_t flashFiringTime =
2699            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2700        rc = AddSetParmEntryToBatch(mParameters,
2701                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2702    }
2703
2704    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2705        uint8_t geometricMode =
2706            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2707        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2708                sizeof(geometricMode), &geometricMode);
2709    }
2710
2711    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2712        uint8_t geometricStrength =
2713            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2714        rc = AddSetParmEntryToBatch(mParameters,
2715                CAM_INTF_META_GEOMETRIC_STRENGTH,
2716                sizeof(geometricStrength), &geometricStrength);
2717    }
2718
2719    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2720        uint8_t hotPixelMode =
2721            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2722        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2723                sizeof(hotPixelMode), &hotPixelMode);
2724    }
2725
2726    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2727        float lensAperture =
2728            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2729        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2730                sizeof(lensAperture), &lensAperture);
2731    }
2732
2733    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2734        float filterDensity =
2735            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2736        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2737                sizeof(filterDensity), &filterDensity);
2738    }
2739
2740    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2741        float focalLength =
2742            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2743        rc = AddSetParmEntryToBatch(mParameters,
2744                CAM_INTF_META_LENS_FOCAL_LENGTH,
2745                sizeof(focalLength), &focalLength);
2746    }
2747
2748    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2749        uint8_t optStabMode =
2750            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2751        rc = AddSetParmEntryToBatch(mParameters,
2752                CAM_INTF_META_LENS_OPT_STAB_MODE,
2753                sizeof(optStabMode), &optStabMode);
2754    }
2755
2756    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2757        uint8_t noiseRedMode =
2758            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2759        rc = AddSetParmEntryToBatch(mParameters,
2760                CAM_INTF_META_NOISE_REDUCTION_MODE,
2761                sizeof(noiseRedMode), &noiseRedMode);
2762    }
2763
2764    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2765        uint8_t noiseRedStrength =
2766            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2767        rc = AddSetParmEntryToBatch(mParameters,
2768                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2769                sizeof(noiseRedStrength), &noiseRedStrength);
2770    }
2771
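    /*
     * Remember the crop region locally so that the AE/AF/AWB region settings
     * handled further below can be checked against it (resetIfNeededROI)
     * before being sent to the HAL.
     */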
2772    cam_crop_region_t scalerCropRegion;
2773    bool scalerCropSet = false;
2774    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2775        scalerCropRegion.left =
2776            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2777        scalerCropRegion.top =
2778            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2779        scalerCropRegion.width =
2780            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2781        scalerCropRegion.height =
2782            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2783        rc = AddSetParmEntryToBatch(mParameters,
2784                CAM_INTF_META_SCALER_CROP_REGION,
2785                sizeof(scalerCropRegion), &scalerCropRegion);
2786        scalerCropSet = true;
2787    }
2788
2789    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2790        int64_t sensorExpTime =
2791            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2792        rc = AddSetParmEntryToBatch(mParameters,
2793                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2794                sizeof(sensorExpTime), &sensorExpTime);
2795    }
2796
2797    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2798        int64_t sensorFrameDuration =
2799            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2800        rc = AddSetParmEntryToBatch(mParameters,
2801                CAM_INTF_META_SENSOR_FRAME_DURATION,
2802                sizeof(sensorFrameDuration), &sensorFrameDuration);
2803    }
2804
2805    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2806        int32_t sensorSensitivity =
2807            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2808        rc = AddSetParmEntryToBatch(mParameters,
2809                CAM_INTF_META_SENSOR_SENSITIVITY,
2810                sizeof(sensorSensitivity), &sensorSensitivity);
2811    }
2812
2813    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
2814        int32_t shadingMode =
2815            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
2816        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
2817                sizeof(shadingMode), &shadingMode);
2818    }
2819
2820    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
2821        uint8_t shadingStrength =
2822            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
2823        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
2824                sizeof(shadingStrength), &shadingStrength);
2825    }
2826
2827    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
2828        uint8_t facedetectMode =
2829            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
2830        rc = AddSetParmEntryToBatch(mParameters,
2831                CAM_INTF_META_STATS_FACEDETECT_MODE,
2832                sizeof(facedetectMode), &facedetectMode);
2833    }
2834
2835    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
2836        uint8_t histogramMode =
2837            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
2838        rc = AddSetParmEntryToBatch(mParameters,
2839                CAM_INTF_META_STATS_HISTOGRAM_MODE,
2840                sizeof(histogramMode), &histogramMode);
2841    }
2842
2843    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
2844        uint8_t sharpnessMapMode =
2845            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
2846        rc = AddSetParmEntryToBatch(mParameters,
2847                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
2848                sizeof(sharpnessMapMode), &sharpnessMapMode);
2849    }
2850
2851    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
2852        uint8_t tonemapMode =
2853            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
2854        rc = AddSetParmEntryToBatch(mParameters,
2855                CAM_INTF_META_TONEMAP_MODE,
2856                sizeof(tonemapMode), &tonemapMode);
2857    }
2858
2859    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2860        uint8_t captureIntent =
2861            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2862        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2863                sizeof(captureIntent), &captureIntent);
2864    }
2865
2866    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
2867        cam_area_t roi;
2868        bool reset = true;
2869        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
2870        if (scalerCropSet) {
2871            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2872        }
2873        if (reset) {
2874            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
2875                    sizeof(roi), &roi);
2876        }
2877    }
2878
2879    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
2880        cam_area_t roi;
2881        bool reset = true;
2882        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
2883        if (scalerCropSet) {
2884            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2885        }
2886        if (reset) {
2887            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
2888                    sizeof(roi), &roi);
2889        }
2890    }
2891
2892    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
2893        cam_area_t roi;
2894        bool reset = true;
2895        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
2896        if (scalerCropSet) {
2897            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2898        }
2899        if (reset) {
2900            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
2901                    sizeof(roi), &roi);
2902        }
2903    }
2904    return rc;
2905}
2906
2907/*===========================================================================
2908 * FUNCTION   : getJpegSettings
2909 *
2910 * DESCRIPTION: Cache the JPEG-related settings from the framework request in
2911 *              the HAL (mJpegSettings) for use when the snapshot is encoded
2912 *
2913 * PARAMETERS :
2914 *   @settings  : frame settings information from framework
2915 *
2916 * RETURN     : success: NO_ERROR
2917 *              failure: NO_MEMORY if the settings buffer cannot be allocated
2919 *==========================================================================*/
2920int QCamera3HardwareInterface::getJpegSettings
2921                                  (const camera_metadata_t *settings)
2922{
2923    if (mJpegSettings) {
2924        if (mJpegSettings->gps_timestamp) {
2925            free(mJpegSettings->gps_timestamp);
2926            mJpegSettings->gps_timestamp = NULL;
2927        }
2928        if (mJpegSettings->gps_coordinates) {
2929            for (int i = 0; i < 3; i++) {
2930                free(mJpegSettings->gps_coordinates[i]);
2931                mJpegSettings->gps_coordinates[i] = NULL;
2932            }
2933        }
2934        free(mJpegSettings);
2935        mJpegSettings = NULL;
2936    }
2937    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
    if (mJpegSettings == NULL) {
        ALOGE("%s: Failed to allocate jpeg settings", __func__);
        return NO_MEMORY;
    }
2938    CameraMetadata jpeg_settings;
2939    jpeg_settings = settings;
2940
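    /* Fall back to sane defaults for any JPEG tag the request omits:
     * orientation 0, quality 85 and a 0x0 (disabled) thumbnail. */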
2941    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
2942        mJpegSettings->jpeg_orientation =
2943            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
2944    } else {
2945        mJpegSettings->jpeg_orientation = 0;
2946    }
2947    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
2948        mJpegSettings->jpeg_quality =
2949            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
2950    } else {
2951        mJpegSettings->jpeg_quality = 85;
2952    }
2953    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
2954        mJpegSettings->thumbnail_size.width =
2955            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
2956        mJpegSettings->thumbnail_size.height =
2957            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
2958    } else {
2959        mJpegSettings->thumbnail_size.width = 0;
2960        mJpegSettings->thumbnail_size.height = 0;
2961    }
2962    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
2963        for (int i = 0; i < 3; i++) {
2964            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double));
2965            *(mJpegSettings->gps_coordinates[i]) =
2966                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
2967        }
2968    } else{
2969       for (int i = 0; i < 3; i++) {
2970            mJpegSettings->gps_coordinates[i] = NULL;
2971        }
2972    }
2973
2974    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
2975        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t));
2976        *(mJpegSettings->gps_timestamp) =
2977            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
2978    } else {
2979        mJpegSettings->gps_timestamp = NULL;
2980    }
2981
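    /* Copy the GPS processing method string and make sure it is NUL
     * terminated.  This assumes the string supplied by the framework fits in
     * the fixed-size gps_processing_method buffer; no bounds check is done. */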
2982    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
2983        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
2984        for (int i = 0; i < len; i++) {
2985            mJpegSettings->gps_processing_method[i] =
2986                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
2987        }
2988        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
2989            mJpegSettings->gps_processing_method[len] = '\0';
2990        }
2991    } else {
2992        mJpegSettings->gps_processing_method[0] = '\0';
2993    }
2994
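    /* If the request does not carry a sensitivity value, fall back to the ISO
     * reported in the most recent metadata (mMetadataResponse). */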
2995    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2996        mJpegSettings->sensor_sensitivity =
2997            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2998    } else {
2999        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3000    }
3001
3002    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3003        mJpegSettings->lens_focal_length =
3004            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3005    }
3006    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3007        mJpegSettings->exposure_compensation =
3008            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3009    }
3010    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3011    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3012    return 0;
3013}
3014
3015/*===========================================================================
3016 * FUNCTION   : captureResultCb
3017 *
3018 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3019 *
3020 * PARAMETERS :
3021 *   @metadata     : metadata (super buffer) from mm-camera-interface
3022 *   @buffer       : actual gralloc buffer to be returned to the framework. NULL if metadata.
3023 *   @frame_number : frame number of the corresponding capture request
 *   @userdata     : opaque pointer to the QCamera3HardwareInterface instance
3024 *
3025 * RETURN     : NONE
3026 *==========================================================================*/
3027void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3028                camera3_stream_buffer_t *buffer,
3029                uint32_t frame_number, void *userdata)
3030{
3031    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3032    if (hw == NULL) {
3033        ALOGE("%s: Invalid hw %p", __func__, hw);
3034        return;
3035    }
3036
3037    hw->captureResultCb(metadata, buffer, frame_number);
3038    return;
3039}
3040
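/*
 * The static member functions below are C-style entry points: the framework
 * invokes them through a camera3_device_ops table and each one recovers the
 * QCamera3HardwareInterface instance from device->priv before forwarding the
 * call.  As a rough sketch of the wiring (the table name here is hypothetical;
 * the real table is populated elsewhere in this HAL), it looks roughly like:
 *
 *   camera3_device_ops_t example_camera_ops = {
 *       .initialize                          = QCamera3HardwareInterface::initialize,
 *       .configure_streams                   = QCamera3HardwareInterface::configure_streams,
 *       .register_stream_buffers             = QCamera3HardwareInterface::register_stream_buffers,
 *       .construct_default_request_settings  = QCamera3HardwareInterface::construct_default_request_settings,
 *       .process_capture_request             = QCamera3HardwareInterface::process_capture_request,
 *       .get_metadata_vendor_tag_ops         = QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
 *       .dump                                = QCamera3HardwareInterface::dump,
 *   };
 *
 * close_camera_device, by contrast, is installed as the hw_device_t close hook.
 */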
3041/*===========================================================================
3042 * FUNCTION   : initialize
3043 *
3044 * DESCRIPTION: Pass framework callback pointers to HAL
3045 *
3046 * PARAMETERS :
3047 *   @device       : camera3 device handle
3048 *   @callback_ops : callback function table supplied by the framework
3049 * RETURN     : Success : 0
3050 *              Failure: -ENODEV
3051 *==========================================================================*/
3052
3053int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3054                                  const camera3_callback_ops_t *callback_ops)
3055{
3056    ALOGV("%s: E", __func__);
3057    QCamera3HardwareInterface *hw =
3058        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3059    if (!hw) {
3060        ALOGE("%s: NULL camera device", __func__);
3061        return -ENODEV;
3062    }
3063
3064    int rc = hw->initialize(callback_ops);
3065    ALOGV("%s: X", __func__);
3066    return rc;
3067}
3068
3069/*===========================================================================
3070 * FUNCTION   : configure_streams
3071 *
3072 * DESCRIPTION: Configure the set of output streams requested by the framework
3073 *
3074 * PARAMETERS :
3075 *   @device      : camera3 device handle
3076 *   @stream_list : list of streams to be configured
3077 * RETURN     : Success: 0
3078 *              Failure: -EINVAL (if stream configuration is invalid)
3079 *                       -ENODEV (fatal error)
3080 *==========================================================================*/
3081
3082int QCamera3HardwareInterface::configure_streams(
3083        const struct camera3_device *device,
3084        camera3_stream_configuration_t *stream_list)
3085{
3086    ALOGV("%s: E", __func__);
3087    QCamera3HardwareInterface *hw =
3088        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3089    if (!hw) {
3090        ALOGE("%s: NULL camera device", __func__);
3091        return -ENODEV;
3092    }
3093    int rc = hw->configureStreams(stream_list);
3094    ALOGV("%s: X", __func__);
3095    return rc;
3096}
3097
3098/*===========================================================================
3099 * FUNCTION   : register_stream_buffers
3100 *
3101 * DESCRIPTION: Register stream buffers with the device
3102 *
3103 * PARAMETERS :
3104 *   @device     : camera3 device handle
3105 *   @buffer_set : buffers to be registered for a configured stream
 * RETURN     : Success: 0
 *              Failure: -ENODEV (NULL camera device)
3106 *==========================================================================*/
3107int QCamera3HardwareInterface::register_stream_buffers(
3108        const struct camera3_device *device,
3109        const camera3_stream_buffer_set_t *buffer_set)
3110{
3111    ALOGV("%s: E", __func__);
3112    QCamera3HardwareInterface *hw =
3113        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3114    if (!hw) {
3115        ALOGE("%s: NULL camera device", __func__);
3116        return -ENODEV;
3117    }
3118    int rc = hw->registerStreamBuffers(buffer_set);
3119    ALOGV("%s: X", __func__);
3120    return rc;
3121}
3122
3123/*===========================================================================
3124 * FUNCTION   : construct_default_request_settings
3125 *
3126 * DESCRIPTION: Configure a settings buffer to meet the required use case
3127 *
3128 * PARAMETERS :
3129 *   @device : camera3 device handle
3130 *   @type   : capture template type (CAMERA3_TEMPLATE_*)
3131 * RETURN     : Success: Return valid metadata
3132 *              Failure: Return NULL
3133 *==========================================================================*/
3134const camera_metadata_t* QCamera3HardwareInterface::
3135    construct_default_request_settings(const struct camera3_device *device,
3136                                        int type)
3137{
3138
3139    ALOGV("%s: E", __func__);
3140    camera_metadata_t* fwk_metadata = NULL;
3141    QCamera3HardwareInterface *hw =
3142        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3143    if (!hw) {
3144        ALOGE("%s: NULL camera device", __func__);
3145        return NULL;
3146    }
3147
3148    fwk_metadata = hw->translateCapabilityToMetadata(type);
3149
3150    ALOGV("%s: X", __func__);
3151    return fwk_metadata;
3152}
3153
3154/*===========================================================================
3155 * FUNCTION   : process_capture_request
3156 *
3157 * DESCRIPTION: Queue a capture request from the framework for processing
3158 *
3159 * PARAMETERS :
3160 *   @device  : camera3 device handle
3161 *   @request : capture request with per-frame settings and output buffers
3162 * RETURN     : Success: 0
 *              Failure: -EINVAL (NULL camera device) or the error code
 *                       returned by processCaptureRequest
3163 *==========================================================================*/
3164int QCamera3HardwareInterface::process_capture_request(
3165                    const struct camera3_device *device,
3166                    camera3_capture_request_t *request)
3167{
3168    ALOGV("%s: E", __func__);
3169    QCamera3HardwareInterface *hw =
3170        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3171    if (!hw) {
3172        ALOGE("%s: NULL camera device", __func__);
3173        return -EINVAL;
3174    }
3175
3176    int rc = hw->processCaptureRequest(request);
3177    ALOGV("%s: X", __func__);
3178    return rc;
3179}
3180
3181/*===========================================================================
3182 * FUNCTION   : get_metadata_vendor_tag_ops
3183 *
3184 * DESCRIPTION: Hand the HAL's vendor tag query operations to the framework
3185 *
3186 * PARAMETERS :
3187 *   @device : camera3 device handle
3188 *   @ops    : vendor tag query ops table to be filled in by the HAL
3189 * RETURN     : NONE
3190 *==========================================================================*/
3191
3192void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3193                const struct camera3_device *device,
3194                vendor_tag_query_ops_t* ops)
3195{
3196    ALOGV("%s: E", __func__);
3197    QCamera3HardwareInterface *hw =
3198        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3199    if (!hw) {
3200        ALOGE("%s: NULL camera device", __func__);
3201        return;
3202    }
3203
3204    hw->getMetadataVendorTagOps(ops);
3205    ALOGV("%s: X", __func__);
3206    return;
3207}
3208
3209/*===========================================================================
3210 * FUNCTION   : dump
3211 *
3212 * DESCRIPTION: Dump the HAL's internal state into the given file descriptor
3213 *
3214 * PARAMETERS :
3215 *   @device : camera3 device handle
3216 *   @fd     : file descriptor to write the dump to
3217 * RETURN     : NONE
3218 *==========================================================================*/
3219
3220void QCamera3HardwareInterface::dump(
3221                const struct camera3_device *device, int fd)
3222{
3223    ALOGV("%s: E", __func__);
3224    QCamera3HardwareInterface *hw =
3225        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3226    if (!hw) {
3227        ALOGE("%s: NULL camera device", __func__);
3228        return;
3229    }
3230
3231    hw->dump(fd);
3232    ALOGV("%s: X", __func__);
3233    return;
3234}
3235
3236/*===========================================================================
3237 * FUNCTION   : close_camera_device
3238 *
3239 * DESCRIPTION: Close the camera device, release the HAL instance and mark
3240 *              the camera session inactive (invoked via the hw_device_t close hook)
3241 * PARAMETERS :
3242 *   @device : hw_device_t handle of the camera device to close
3243 * RETURN     : Success: NO_ERROR
3244 *              Failure: BAD_VALUE (NULL camera device)
3245 *==========================================================================*/
3246int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3247{
3248    ALOGV("%s: E", __func__);
3249    int ret = NO_ERROR;
3250    QCamera3HardwareInterface *hw =
3251        reinterpret_cast<QCamera3HardwareInterface *>(
3252            reinterpret_cast<camera3_device_t *>(device)->priv);
3253    if (!hw) {
3254        ALOGE("%s: NULL camera device", __func__);
3255        return BAD_VALUE;
3256    }
3257    delete hw;
3258
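    /* Mark the (single) camera session as no longer active, under the session
     * lock, so that a subsequent camera open is allowed to proceed. */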
3259    pthread_mutex_lock(&mCameraSessionLock);
3260    mCameraSessionActive = 0;
3261    pthread_mutex_unlock(&mCameraSessionLock);
3262    ALOGV("%s: X", __func__);
3263    return ret;
3264}
3265
3266}; //end namespace qcamera
3267