QCamera3HWI.cpp revision 22817c71579a2516bd3bdf748a5bc5fa73b9f32b
1/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
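/* Globals shared across camera sessions: per-camera-id capability info,
 * the previously applied settings buffer, and the static metadata
 * advertised to the framework. */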
49cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
50parm_buffer_t *prevSettings;
51const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
52
53pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
54    PTHREAD_MUTEX_INITIALIZER;
55unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
56
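/* Lookup tables translating Android framework control enums into the
 * corresponding mm-camera backend (CAM_*) enums. */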
57const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
58    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
59    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
60    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
61    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
62    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
63    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
64    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
65    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
66    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
67};
68
69const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
70    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
71    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
72    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
73    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
74    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT },
75    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
76    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
77    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
78    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
79};
80
81const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
82    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
83    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
84    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
85    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
86    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
87    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
88    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
89    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
90    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
91    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
92    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
93    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
94    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
95    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
96    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
97};
98
99const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
100    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
101    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
102    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
103    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
104    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
105    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
106};
107
108const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
109    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
110    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
111    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
112    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
113};
114
115const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
116    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
117    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
118    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
119    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
120    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
121};
122
123const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
124    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
125    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON   },
126    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
127};
128
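/* Supported JPEG thumbnail sizes as (width, height) pairs; the trailing
 * (0, 0) entry indicates that thumbnail generation can be disabled. */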
129const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
130                                             320, 240, 176, 144, 0, 0};
131
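/* camera3 device ops table exported to the framework; the entries are static
 * member functions that dispatch to the per-instance implementation through
 * camera3_device_t::priv (set to "this" in the constructor below). */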
132camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
133    initialize:                         QCamera3HardwareInterface::initialize,
134    configure_streams:                  QCamera3HardwareInterface::configure_streams,
135    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
136    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
137    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
138    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
139    dump:                               QCamera3HardwareInterface::dump,
140};
141
142
143/*===========================================================================
144 * FUNCTION   : QCamera3HardwareInterface
145 *
146 * DESCRIPTION: constructor of QCamera3HardwareInterface
147 *
148 * PARAMETERS :
149 *   @cameraId  : camera ID
150 *
151 * RETURN     : none
152 *==========================================================================*/
153QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
154    : mCameraId(cameraId),
155      mCameraHandle(NULL),
156      mCameraOpened(false),
157      mCameraInitialized(false),
158      mCallbackOps(NULL),
159      mInputStream(NULL),
160      mMetadataChannel(NULL),
161      mFirstRequest(false),
162      mParamHeap(NULL),
163      mParameters(NULL),
164      mJpegSettings(NULL),
165      m_pPowerModule(NULL)
166{
167    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
168    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
169    mCameraDevice.common.close = close_camera_device;
170    mCameraDevice.ops = &mCameraOps;
171    mCameraDevice.priv = this;
172    gCamCapability[cameraId]->version = CAM_HAL_V3;
173
174    pthread_mutex_init(&mRequestLock, NULL);
175    pthread_cond_init(&mRequestCond, NULL);
176    mPendingRequest = 0;
177    mCurrentRequestId = -1;
178
179    pthread_mutex_init(&mMutex, NULL);
180    pthread_mutex_init(&mCaptureResultLock, NULL);
181
182    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
183        mDefaultMetadata[i] = NULL;
184
185#ifdef HAS_MULTIMEDIA_HINTS
186    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
187        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
188    }
189#endif
190}
191
192/*===========================================================================
193 * FUNCTION   : ~QCamera3HardwareInterface
194 *
195 * DESCRIPTION: destructor of QCamera3HardwareInterface
196 *
197 * PARAMETERS : none
198 *
199 * RETURN     : none
200 *==========================================================================*/
201QCamera3HardwareInterface::~QCamera3HardwareInterface()
202{
203    ALOGV("%s: E", __func__);
204    /* We need to stop all streams before deleting any stream */
205    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
206        it != mStreamInfo.end(); it++) {
207        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
208        if (channel)
209            channel->stop();
210    }
211    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
212        it != mStreamInfo.end(); it++) {
213        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
214        if (channel)
215            delete channel;
216        free (*it);
217    }
218
219    if (mJpegSettings != NULL) {
220        free(mJpegSettings);
221        mJpegSettings = NULL;
222    }
223
224    /* Clean up all channels */
225    if (mCameraInitialized) {
226        if (mMetadataChannel) {
227            mMetadataChannel->stop();
228            delete mMetadataChannel;
229            mMetadataChannel = NULL;
230        }
231        deinitParameters();
232    }
233
234    if (mCameraOpened)
235        closeCamera();
236
237    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
238        if (mDefaultMetadata[i])
239            free_camera_metadata(mDefaultMetadata[i]);
240
241    pthread_mutex_destroy(&mRequestLock);
242    pthread_cond_destroy(&mRequestCond);
243
244    pthread_mutex_destroy(&mMutex);
245    pthread_mutex_destroy(&mCaptureResultLock);
246    ALOGV("%s: X", __func__);
247}
248
249/*===========================================================================
250 * FUNCTION   : openCamera
251 *
252 * DESCRIPTION: open camera
253 *
254 * PARAMETERS :
255 *   @hw_device  : double ptr for camera device struct
256 *
257 * RETURN     : int32_t type of status
258 *              NO_ERROR  -- success
259 *              non-zero failure code
260 *==========================================================================*/
261int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
262{
263    int rc = 0;
264    pthread_mutex_lock(&mCameraSessionLock);
265    if (mCameraSessionActive) {
266        ALOGE("%s: multiple simultaneous camera instances not supported", __func__);
267        pthread_mutex_unlock(&mCameraSessionLock);
268        return INVALID_OPERATION;
269    }
270
271    if (mCameraOpened) {
272        *hw_device = NULL;
        pthread_mutex_unlock(&mCameraSessionLock);
273        return PERMISSION_DENIED;
274    }
275
276    rc = openCamera();
277    if (rc == 0) {
278        *hw_device = &mCameraDevice.common;
279        mCameraSessionActive = 1;
280    } else
281        *hw_device = NULL;
282
283#ifdef HAS_MULTIMEDIA_HINTS
284    if (rc == 0) {
285        if (m_pPowerModule) {
286            if (m_pPowerModule->powerHint) {
287                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
288                        (void *)"state=1");
289            }
290        }
291    }
292#endif
293    pthread_mutex_unlock(&mCameraSessionLock);
294    return rc;
295}
296
297/*===========================================================================
298 * FUNCTION   : openCamera
299 *
300 * DESCRIPTION: open camera
301 *
302 * PARAMETERS : none
303 *
304 * RETURN     : int32_t type of status
305 *              NO_ERROR  -- success
306 *              non-zero failure code
307 *==========================================================================*/
308int QCamera3HardwareInterface::openCamera()
309{
310    if (mCameraHandle) {
311        ALOGE("Failure: Camera already opened");
312        return ALREADY_EXISTS;
313    }
314    mCameraHandle = camera_open(mCameraId);
315    if (!mCameraHandle) {
316        ALOGE("camera_open failed.");
317        return UNKNOWN_ERROR;
318    }
319
320    mCameraOpened = true;
321
322    return NO_ERROR;
323}
324
325/*===========================================================================
326 * FUNCTION   : closeCamera
327 *
328 * DESCRIPTION: close camera
329 *
330 * PARAMETERS : none
331 *
332 * RETURN     : int32_t type of status
333 *              NO_ERROR  -- success
334 *              non-zero failure code
335 *==========================================================================*/
336int QCamera3HardwareInterface::closeCamera()
337{
338    int rc = NO_ERROR;
339
340    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
341    mCameraHandle = NULL;
342    mCameraOpened = false;
343
344#ifdef HAS_MULTIMEDIA_HINTS
345    if (rc == NO_ERROR) {
346        if (m_pPowerModule) {
347            if (m_pPowerModule->powerHint) {
348                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
349                        (void *)"state=0");
350            }
351        }
352    }
353#endif
354
355    return rc;
356}
357
358/*===========================================================================
359 * FUNCTION   : initialize
360 *
361 * DESCRIPTION: Initialize frameworks callback functions
362 *
363 * PARAMETERS :
364 *   @callback_ops : callback function to frameworks
365 *
366 * RETURN     : int32_t type of status (0 -- success, non-zero failure code)
367 *
368 *==========================================================================*/
369int QCamera3HardwareInterface::initialize(
370        const struct camera3_callback_ops *callback_ops)
371{
372    int rc;
373
374    pthread_mutex_lock(&mMutex);
375
376    rc = initParameters();
377    if (rc < 0) {
378        ALOGE("%s: initParameters failed %d", __func__, rc);
379        goto err1;
380    }
381    mCallbackOps = callback_ops;
382
383    pthread_mutex_unlock(&mMutex);
384    mCameraInitialized = true;
385    return 0;
386
387err1:
388    pthread_mutex_unlock(&mMutex);
389    return rc;
390}
391
392/*===========================================================================
393 * FUNCTION   : configureStreams
394 *
395 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
396 *              and output streams.
397 *
398 * PARAMETERS :
399 *   @stream_list : streams to be configured
400 *
401 * RETURN     : int32_t type of status (0 -- success, non-zero failure code)
402 *
403 *==========================================================================*/
404int QCamera3HardwareInterface::configureStreams(
405        camera3_stream_configuration_t *streamList)
406{
407    int rc = 0;
408    pthread_mutex_lock(&mMutex);
409
410    // Sanity check stream_list
411    if (streamList == NULL) {
412        ALOGE("%s: NULL stream configuration", __func__);
413        pthread_mutex_unlock(&mMutex);
414        return BAD_VALUE;
415    }
416
417    if (streamList->streams == NULL) {
418        ALOGE("%s: NULL stream list", __func__);
419        pthread_mutex_unlock(&mMutex);
420        return BAD_VALUE;
421    }
422
423    if (streamList->num_streams < 1) {
424        ALOGE("%s: Bad number of streams requested: %d", __func__,
425                streamList->num_streams);
426        pthread_mutex_unlock(&mMutex);
427        return BAD_VALUE;
428    }
429
430    camera3_stream_t *inputStream = NULL;
431    /* first invalidate all the streams in mStreamInfo
432     * if they appear again, they will be validated */
433    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
434            it != mStreamInfo.end(); it++) {
435        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
436        channel->stop();
437        (*it)->status = INVALID;
438    }
439
440
441    for (size_t i = 0; i < streamList->num_streams; i++) {
442        camera3_stream_t *newStream = streamList->streams[i];
443        ALOGV("%s: newStream type = %d, stream format = %d",
444                __func__, newStream->stream_type, newStream->format);
445        //if the stream is already in mStreamInfo, validate it
446        bool stream_exists = false;
447        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
448                it != mStreamInfo.end(); it++) {
449            if ((*it)->stream == newStream) {
450                QCamera3Channel *channel =
451                    (QCamera3Channel*)(*it)->stream->priv;
452                stream_exists = true;
453                (*it)->status = RECONFIGURE;
454                /*delete the channel object associated with the stream because
455                  we need to reconfigure*/
456                delete channel;
457                (*it)->stream->priv = NULL;
458            }
459        }
460        if (!stream_exists) {
461            //new stream
462            stream_info_t* stream_info;
463            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
464            stream_info->stream = newStream;
465            stream_info->status = VALID;
466            stream_info->registered = 0;
467            mStreamInfo.push_back(stream_info);
468        }
469        if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
470            if (inputStream != NULL) {
471                ALOGE("%s: Multiple input streams requested!", __func__);
472                pthread_mutex_unlock(&mMutex);
473                return BAD_VALUE;
474            }
475            inputStream = newStream;
476        }
477    }
478    mInputStream = inputStream;
479
480    /*clean up invalid streams*/
481    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
482            it != mStreamInfo.end();) {
483        if(((*it)->status) == INVALID){
484            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
485            delete channel;
486            if ((*it)->registered)
                delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
487            free(*it);
488            it = mStreamInfo.erase(it);
489        } else {
490            it++;
491        }
492    }
493
494    if (mMetadataChannel) {
495        mMetadataChannel->stop();
496        delete mMetadataChannel;
497        mMetadataChannel = NULL;
498    }
499
500    //Create metadata channel and initialize it
501    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
502                    mCameraHandle->ops, captureResultCb,
503                    &gCamCapability[mCameraId]->padding_info, this);
504    if (mMetadataChannel == NULL) {
505        ALOGE("%s: failed to allocate metadata channel", __func__);
506        rc = -ENOMEM;
507        pthread_mutex_unlock(&mMutex);
508        return rc;
509    }
510    rc = mMetadataChannel->initialize();
511    if (rc < 0) {
512        ALOGE("%s: metadata channel initialization failed", __func__);
513        delete mMetadataChannel;
514        pthread_mutex_unlock(&mMutex);
515        return rc;
516    }
517
518    /* Allocate channel objects for the requested streams */
519    for (size_t i = 0; i < streamList->num_streams; i++) {
520        camera3_stream_t *newStream = streamList->streams[i];
521        if (newStream->priv == NULL) {
522            //New stream, construct channel
523            switch (newStream->stream_type) {
524            case CAMERA3_STREAM_INPUT:
525                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
526                break;
527            case CAMERA3_STREAM_BIDIRECTIONAL:
528                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
529                    GRALLOC_USAGE_HW_CAMERA_WRITE;
530                break;
531            case CAMERA3_STREAM_OUTPUT:
532                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
533                break;
534            default:
535                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
536                break;
537            }
538
539            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
540                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
541                QCamera3Channel *channel;
542                switch (newStream->format) {
543                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
544                case HAL_PIXEL_FORMAT_YCbCr_420_888:
545                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
546                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
547                            mCameraHandle->ops, captureResultCb,
548                            &gCamCapability[mCameraId]->padding_info, this, newStream);
549                    if (channel == NULL) {
550                        ALOGE("%s: allocation of channel failed", __func__);
551                        pthread_mutex_unlock(&mMutex);
552                        return -ENOMEM;
553                    }
554
555                    newStream->priv = channel;
556                    break;
557                case HAL_PIXEL_FORMAT_BLOB:
558                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
559                    channel = new QCamera3PicChannel(mCameraHandle->camera_handle,
560                            mCameraHandle->ops, captureResultCb,
561                            &gCamCapability[mCameraId]->padding_info, this, newStream);
562                    if (channel == NULL) {
563                        ALOGE("%s: allocation of channel failed", __func__);
564                        pthread_mutex_unlock(&mMutex);
565                        return -ENOMEM;
566                    }
567                    newStream->priv = channel;
568                    break;
569
570                //TODO: Add support for app consumed format?
571                default:
572                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
573                    break;
574                }
575            }
576        } else {
577            // Channel already exists for this stream
578            // Do nothing for now
579        }
580    }
581    /*For the streams to be reconfigured we need to register the buffers
582      since the framework won't*/
583    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
584            it != mStreamInfo.end(); it++) {
585        if ((*it)->status == RECONFIGURE) {
586            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
587            /*only register buffers for streams that have already been
588              registered*/
589            if ((*it)->registered) {
590                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
591                        (*it)->buffer_set.buffers);
592                if (rc != NO_ERROR) {
593                    ALOGE("%s: Failed to register the buffers of old stream,"
594                            " rc = %d", __func__, rc);
595                }
596                ALOGV("%s: channel %p has %d buffers",
597                        __func__, channel, (*it)->buffer_set.num_buffers);
598            }
599        }
600
601        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
602        if (index == NAME_NOT_FOUND) {
603            mPendingBuffersMap.add((*it)->stream, 0);
604        } else {
605            mPendingBuffersMap.editValueAt(index) = 0;
606        }
607    }
608
609    /* Initialize mPendingRequestsList and mPendingBuffersMap */
610    mPendingRequestsList.clear();
611
612    //settings/parameters don't carry over for new configureStreams
613    memset(mParameters, 0, sizeof(parm_buffer_t));
614    mFirstRequest = true;
615
616    pthread_mutex_unlock(&mMutex);
617    return rc;
618}
619
620/*===========================================================================
621 * FUNCTION   : validateCaptureRequest
622 *
623 * DESCRIPTION: validate a capture request from camera service
624 *
625 * PARAMETERS :
626 *   @request : request from framework to process
627 *
628 * RETURN     : int32_t type of status (NO_ERROR -- valid request, BAD_VALUE otherwise)
629 *
630 *==========================================================================*/
631int QCamera3HardwareInterface::validateCaptureRequest(
632                    camera3_capture_request_t *request)
633{
634    ssize_t idx = 0;
635    const camera3_stream_buffer_t *b;
636    CameraMetadata meta;
637
638    /* Sanity check the request */
639    if (request == NULL) {
640        ALOGE("%s: NULL capture request", __func__);
641        return BAD_VALUE;
642    }
643
644    uint32_t frameNumber = request->frame_number;
645    if (request->input_buffer != NULL &&
646            request->input_buffer->stream != mInputStream) {
647        ALOGE("%s: Request %d: Input buffer not from input stream!",
648                __func__, frameNumber);
649        return BAD_VALUE;
650    }
651    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
652        ALOGE("%s: Request %d: No output buffers provided!",
653                __func__, frameNumber);
654        return BAD_VALUE;
655    }
656    if (request->input_buffer != NULL) {
657        //TODO
658        ALOGE("%s: Not supporting input buffer yet", __func__);
659        return BAD_VALUE;
660    }
661
662    // Validate all buffers
663    b = request->output_buffers;
664    do {
665        QCamera3Channel *channel =
666                static_cast<QCamera3Channel*>(b->stream->priv);
667        if (channel == NULL) {
668            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
669                    __func__, frameNumber, idx);
670            return BAD_VALUE;
671        }
672        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
673            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
674                    __func__, frameNumber, idx);
675            return BAD_VALUE;
676        }
677        if (b->release_fence != -1) {
678            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
679                    __func__, frameNumber, idx);
680            return BAD_VALUE;
681        }
682        if (b->buffer == NULL) {
683            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
684                    __func__, frameNumber, idx);
685            return BAD_VALUE;
686        }
687        idx++;
688        b = request->output_buffers + idx;
689    } while (idx < (ssize_t)request->num_output_buffers);
690
691    return NO_ERROR;
692}
693
694/*===========================================================================
695 * FUNCTION   : registerStreamBuffers
696 *
697 * DESCRIPTION: Register buffers for a given stream with the HAL device.
698 *
699 * PARAMETERS :
700 *   @buffer_set : buffers to be registered for a single configured stream
701 *
702 * RETURN     : int32_t type of status (NO_ERROR -- success, non-zero failure code)
703 *
704 *==========================================================================*/
705int QCamera3HardwareInterface::registerStreamBuffers(
706        const camera3_stream_buffer_set_t *buffer_set)
707{
708    int rc = 0;
709
710    pthread_mutex_lock(&mMutex);
711
712    if (buffer_set == NULL) {
713        ALOGE("%s: Invalid buffer_set parameter.", __func__);
714        pthread_mutex_unlock(&mMutex);
715        return -EINVAL;
716    }
717    if (buffer_set->stream == NULL) {
718        ALOGE("%s: Invalid stream parameter.", __func__);
719        pthread_mutex_unlock(&mMutex);
720        return -EINVAL;
721    }
722    if (buffer_set->num_buffers < 1) {
723        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
724        pthread_mutex_unlock(&mMutex);
725        return -EINVAL;
726    }
727    if (buffer_set->buffers == NULL) {
728        ALOGE("%s: Invalid buffers parameter.", __func__);
729        pthread_mutex_unlock(&mMutex);
730        return -EINVAL;
731    }
732
733    camera3_stream_t *stream = buffer_set->stream;
734    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
735
736    //set the buffer_set in the mStreamInfo array
737    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
738            it != mStreamInfo.end(); it++) {
739        if ((*it)->stream == stream) {
740            uint32_t numBuffers = buffer_set->num_buffers;
741            (*it)->buffer_set.stream = buffer_set->stream;
742            (*it)->buffer_set.num_buffers = numBuffers;
743            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
744            if ((*it)->buffer_set.buffers == NULL) {
745                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
746                pthread_mutex_unlock(&mMutex);
747                return -ENOMEM;
748            }
749            for (size_t j = 0; j < numBuffers; j++){
750                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
751            }
752            (*it)->registered = 1;
753        }
754    }
755
756    if (stream->stream_type != CAMERA3_STREAM_OUTPUT) {
757        ALOGE("%s: non-output stream types are not yet supported", __func__);
758        pthread_mutex_unlock(&mMutex);
759        return -EINVAL;
760    }
761    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
762    if (rc < 0) {
763        ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
764        pthread_mutex_unlock(&mMutex);
765        return -ENODEV;
766    }
767
768    pthread_mutex_unlock(&mMutex);
769    return NO_ERROR;
770}
771
772/*===========================================================================
773 * FUNCTION   : processCaptureRequest
774 *
775 * DESCRIPTION: process a capture request from camera service
776 *
777 * PARAMETERS :
778 *   @request : request from framework to process
779 *
780 * RETURN     : int32_t type of status (NO_ERROR -- success, non-zero failure code)
781 *
782 *==========================================================================*/
783int QCamera3HardwareInterface::processCaptureRequest(
784                    camera3_capture_request_t *request)
785{
786    int rc = NO_ERROR;
787    int32_t request_id;
788    CameraMetadata meta;
789
790    pthread_mutex_lock(&mMutex);
791
792    rc = validateCaptureRequest(request);
793    if (rc != NO_ERROR) {
794        ALOGE("%s: incoming request is not valid", __func__);
795        pthread_mutex_unlock(&mMutex);
796        return rc;
797    }
798
799    uint32_t frameNumber = request->frame_number;
800
801    rc = setFrameParameters(request->frame_number, request->settings);
802    if (rc < 0) {
803        ALOGE("%s: failed to set frame parameters", __func__);
804        pthread_mutex_unlock(&mMutex);
805        return rc;
806    }
807
808    meta = request->settings;
809    if (meta.exists(ANDROID_REQUEST_ID)) {
810        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
811        mCurrentRequestId = request_id;
812        ALOGV("%s: Received request with id: %d",__func__, request_id);
813    } else if (mFirstRequest || mCurrentRequestId == -1){
814        ALOGE("%s: Unable to find request id field, "
815                "& no previous id available", __func__);
        pthread_mutex_unlock(&mMutex);
816        return NAME_NOT_FOUND;
817    } else {
818        ALOGV("%s: Re-using old request id", __func__);
819        request_id = mCurrentRequestId;
820    }
821
822
823    ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__,
824                                    request->num_output_buffers);
825    // Acquire all request buffers first
826    for (size_t i = 0; i < request->num_output_buffers; i++) {
827        const camera3_stream_buffer_t& output = request->output_buffers[i];
828        sp<Fence> acquireFence = new Fence(output.acquire_fence);
829
830        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
831            //Call function to store local copy of jpeg data for encode params.
832            rc = getJpegSettings(request->settings);
833            if (rc < 0) {
834                ALOGE("%s: failed to get jpeg parameters", __func__);
835                pthread_mutex_unlock(&mMutex);
836                return rc;
837            }
838        }
839
840        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
841        if (rc != OK) {
842            ALOGE("%s: fence wait failed %d", __func__, rc);
843            pthread_mutex_unlock(&mMutex);
844            return rc;
845        }
846    }
847
848    /* Update pending request list and pending buffers map */
849    pthread_mutex_lock(&mRequestLock);
850    PendingRequestInfo pendingRequest;
851    pendingRequest.frame_number = frameNumber;
852    pendingRequest.num_buffers = request->num_output_buffers;
853    pendingRequest.request_id = request_id;
854
855    for (size_t i = 0; i < request->num_output_buffers; i++) {
856        RequestedBufferInfo requestedBuf;
857        requestedBuf.stream = request->output_buffers[i].stream;
858        requestedBuf.buffer = NULL;
859        pendingRequest.buffers.push_back(requestedBuf);
860
861        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
862    }
863    mPendingRequestsList.push_back(pendingRequest);
864    pthread_mutex_unlock(&mRequestLock);
865
866    // Notify metadata channel we receive a request
867    mMetadataChannel->request(NULL, frameNumber);
868
869    // Call request on other streams
870    for (size_t i = 0; i < request->num_output_buffers; i++) {
871        const camera3_stream_buffer_t& output = request->output_buffers[i];
872        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
873
874        if (channel == NULL) {
875            ALOGE("%s: invalid channel pointer for stream", __func__);
876            continue;
877        }
878
879        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
880            rc = channel->request(output.buffer, frameNumber, mJpegSettings);
881        } else {
882            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
883                __LINE__, output.buffer, frameNumber);
884            rc = channel->request(output.buffer, frameNumber);
885        }
886        if (rc < 0)
887            ALOGE("%s: request failed", __func__);
888    }
889
890    mFirstRequest = false;
891
892    //Block on conditional variable
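    // (captureResultCb() clears mPendingRequest and signals mRequestCond once
    // this request's metadata arrives and no stream has all of its buffers
    // dequeued, which throttles how fast new requests can be submitted)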
893    pthread_mutex_lock(&mRequestLock);
894    mPendingRequest = 1;
895    while (mPendingRequest == 1) {
896        pthread_cond_wait(&mRequestCond, &mRequestLock);
897    }
898    pthread_mutex_unlock(&mRequestLock);
899
900    pthread_mutex_unlock(&mMutex);
901    return rc;
902}
903
904/*===========================================================================
905 * FUNCTION   : getMetadataVendorTagOps
906 *
907 * DESCRIPTION: query vendor tag metadata operations (not implemented yet)
908 *
909 * PARAMETERS :
910 *
911 *
912 * RETURN     : none
913 *==========================================================================*/
914void QCamera3HardwareInterface::getMetadataVendorTagOps(
915                    vendor_tag_query_ops_t* /*ops*/)
916{
917    /* Enable locks when we eventually add Vendor Tags */
918    /*
919    pthread_mutex_lock(&mMutex);
920
921    pthread_mutex_unlock(&mMutex);
922    */
923    return;
924}
925
926/*===========================================================================
927 * FUNCTION   : dump
928 *
929 * DESCRIPTION: dump HAL state to the given file descriptor (not implemented yet)
930 *
931 * PARAMETERS :
932 *
933 *
934 * RETURN     : none
935 *==========================================================================*/
936void QCamera3HardwareInterface::dump(int /*fd*/)
937{
938    /*Enable lock when we implement this function*/
939    /*
940    pthread_mutex_lock(&mMutex);
941
942    pthread_mutex_unlock(&mMutex);
943    */
944    return;
945}
946
947/*===========================================================================
948 * FUNCTION   : captureResultCb
949 *
950 * DESCRIPTION: Callback handler for all capture results
951 *              (streams, as well as metadata)
952 *
953 * PARAMETERS :
954 *   @metadata : metadata information
955 *   @buffer   : actual gralloc buffer to be returned to frameworks.
956 *               NULL if metadata.
957 *
958 * RETURN     : NONE
959 *==========================================================================*/
960void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
961                camera3_stream_buffer_t *buffer, uint32_t frame_number)
962{
963    pthread_mutex_lock(&mRequestLock);
964
965    if (metadata_buf) {
966        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
967        int32_t frame_number_valid = *(int32_t *)
968            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
969        uint32_t frame_number = *(uint32_t *)
970            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
971        const struct timeval *tv = (const struct timeval *)
972            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
973        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
974            tv->tv_usec * NSEC_PER_USEC;
975
976        if (!frame_number_valid) {
977            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
978            mMetadataChannel->bufDone(metadata_buf);
979            goto done_metadata;
980        }
981        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
982                frame_number, capture_time);
983
984        // Go through the pending requests info and send shutter/results to frameworks
985        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
986                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
987            camera3_capture_result_t result;
988            camera3_notify_msg_t notify_msg;
989            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
990
991            // Flush out all entries with less or equal frame numbers.
992
993            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
994            //Right now it's the same as metadata timestamp
995
996            //TODO: When there is metadata drop, how do we derive the timestamp of
997            //dropped frames? For now, we fake the dropped timestamp by subtracting
998            //from the reported timestamp
999            nsecs_t current_capture_time = capture_time -
1000                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1001
1002            // Send shutter notify to frameworks
1003            notify_msg.type = CAMERA3_MSG_SHUTTER;
1004            notify_msg.message.shutter.frame_number = i->frame_number;
1005            notify_msg.message.shutter.timestamp = current_capture_time;
1006            mCallbackOps->notify(mCallbackOps, &notify_msg);
1007            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1008                    i->frame_number, capture_time);
1009
1010            // Send empty metadata with already filled buffers for dropped metadata
1011            // and send valid metadata with already filled buffers for current metadata
1012            if (i->frame_number < frame_number) {
1013                CameraMetadata dummyMetadata;
1014                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1015                        &current_capture_time, 1);
1016                dummyMetadata.update(ANDROID_REQUEST_ID,
1017                        &(i->request_id), 1);
1018                result.result = dummyMetadata.release();
1019            } else {
1020                result.result = translateCbMetadataToResultMetadata(metadata,
1021                        current_capture_time, i->request_id);
1022                // Return metadata buffer
1023                mMetadataChannel->bufDone(metadata_buf);
1024            }
1025            if (!result.result) {
1026                ALOGE("%s: metadata is NULL", __func__);
1027            }
1028            result.frame_number = i->frame_number;
1029            result.num_output_buffers = 0;
1030            result.output_buffers = NULL;
1031            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1032                    j != i->buffers.end(); j++) {
1033                if (j->buffer) {
1034                    result.num_output_buffers++;
1035                }
1036            }
1037
1038            if (result.num_output_buffers > 0) {
1039                camera3_stream_buffer_t *result_buffers =
1040                    new camera3_stream_buffer_t[result.num_output_buffers];
1041                if (!result_buffers) {
1042                    ALOGE("%s: Fatal error: out of memory", __func__);
1043                }
1044                size_t result_buffers_idx = 0;
1045                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1046                        j != i->buffers.end(); j++) {
1047                    if (j->buffer) {
1048                        result_buffers[result_buffers_idx++] = *(j->buffer);
1049                        free(j->buffer);
1050                        j->buffer = NULL;
1051                        mPendingBuffersMap.editValueFor(j->stream)--;
1052                    }
1053                }
1054                result.output_buffers = result_buffers;
1055
1056                mCallbackOps->process_capture_result(mCallbackOps, &result);
1057                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1058                        __func__, result.frame_number, current_capture_time);
1059                free_camera_metadata((camera_metadata_t *)result.result);
1060                delete[] result_buffers;
1061            } else {
1062                mCallbackOps->process_capture_result(mCallbackOps, &result);
1063                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1064                        __func__, result.frame_number, current_capture_time);
1065                free_camera_metadata((camera_metadata_t *)result.result);
1066            }
1067            // erase the element from the list
1068            i = mPendingRequestsList.erase(i);
1069        }
1070
1071
1072done_metadata:
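        // Unblock processCaptureRequest() only if no stream currently has all
        // of its max_buffers outstanding with the HAL.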
1073        bool max_buffers_dequeued = false;
1074        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1075            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1076            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1077            if (queued_buffers == stream->max_buffers) {
1078                max_buffers_dequeued = true;
1079                break;
1080            }
1081        }
1082        if (!max_buffers_dequeued) {
1083            // Unblock process_capture_request
1084            mPendingRequest = 0;
1085            pthread_cond_signal(&mRequestCond);
1086        }
1087    } else {
1088        // If the frame number doesn't exist in the pending request list,
1089        // directly send the buffer to the frameworks, and update pending buffers map
1090        // Otherwise, book-keep the buffer.
1091        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1092        while (i != mPendingRequestsList.end() && i->frame_number != frame_number)
1093            i++;
1094        if (i == mPendingRequestsList.end()) {
1095            // Verify all pending requests frame_numbers are greater
1096            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1097                    j != mPendingRequestsList.end(); j++) {
1098                if (j->frame_number < frame_number) {
1099                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1100                            __func__, j->frame_number, frame_number);
1101                }
1102            }
1103            camera3_capture_result_t result;
1104            result.result = NULL;
1105            result.frame_number = frame_number;
1106            result.num_output_buffers = 1;
1107            result.output_buffers = buffer;
1108            ALOGV("%s: result frame_number = %d, buffer = %p",
1109                    __func__, frame_number, buffer);
1110            mPendingBuffersMap.editValueFor(buffer->stream)--;
1111            mCallbackOps->process_capture_result(mCallbackOps, &result);
1112        } else {
1113            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1114                    j != i->buffers.end(); j++) {
1115                if (j->stream == buffer->stream) {
1116                    if (j->buffer != NULL) {
1117                        ALOGE("%s: Error: buffer is already set", __func__);
1118                    } else {
1119                        j->buffer = (camera3_stream_buffer_t *)malloc(
1120                                sizeof(camera3_stream_buffer_t));
1121                        *(j->buffer) = *buffer;
1122                        ALOGV("%s: cache buffer %p at result frame_number %d",
1123                                __func__, buffer, frame_number);
1124                    }
1125                }
1126            }
1127        }
1128    }
1129
1130    pthread_mutex_unlock(&mRequestLock);
1131    return;
1132}
1133
1134/*===========================================================================
1135 * FUNCTION   : translateCbMetadataToResultMetadata
1136 *
1137 * DESCRIPTION: translate backend callback metadata into framework result metadata
1138 *
1139 * PARAMETERS :
1140 *   @metadata : metadata information from callback
1141 *
1142 * RETURN     : camera_metadata_t*
1143 *              metadata in a format specified by fwk
1144 *==========================================================================*/
1145camera_metadata_t*
1146QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1147                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1148                                 int32_t request_id)
1149{
1150    CameraMetadata camMetadata;
1151    camera_metadata_t* resultMetadata;
1152
1153    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1154    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1155
1156    /*CAM_INTF_META_HISTOGRAM - TODO*/
1157    /*cam_hist_stats_t  *histogram =
1158      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1159      metadata);*/
1160
1161    /*face detection*/
1162    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1163        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1164    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1165    int32_t faceIds[numFaces];
1166    uint8_t faceScores[numFaces];
1167    int32_t faceRectangles[numFaces * 4];
1168    int32_t faceLandmarks[numFaces * 6];
1169    int j = 0, k = 0;
1170    for (int i = 0; i < numFaces; i++) {
1171        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1172        faceScores[i] = faceDetectionInfo->faces[i].score;
1173        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1174                faceRectangles+j, -1);
1175        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1176        j+= 4;
1177        k+= 6;
1178    }
1179    if (numFaces > 0) {
1180        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1181        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1182        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1183            faceRectangles, numFaces*4);
1184        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1185            faceLandmarks, numFaces*6);
1186    }
1187
1188    uint8_t  *color_correct_mode =
1189        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1190    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1191
1192    int32_t  *ae_precapture_id =
1193        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1194    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1195
1196    /*aec regions*/
1197    cam_area_t  *hAeRegions =
1198        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1199    int32_t aeRegions[5];
1200    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1201    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1202
1203    uint8_t  *ae_state =
1204        (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1205    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1206
1207    uint8_t  *focusMode =
1208        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1209    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1210
1211    /*af regions*/
1212    cam_area_t  *hAfRegions =
1213        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1214    int32_t afRegions[5];
1215    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1216    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1217
1218    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1219    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1220
1221    int32_t  *afTriggerId =
1222        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1223    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1224
1225    uint8_t  *whiteBalance =
1226        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1227    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1228
1229    /*awb regions*/
1230    cam_area_t  *hAwbRegions =
1231        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1232    int32_t awbRegions[5];
1233    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1234    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1235
1236    uint8_t  *whiteBalanceState =
1237        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1238    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1239
1240    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1241    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1242
1243    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
1244    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1245
1246    uint8_t  *flashPower =
1247        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1248    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1249
1250    int64_t  *flashFiringTime =
1251        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1252    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1253
1254    /*int32_t  *ledMode =
1255      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1256      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1257
1258    uint8_t  *flashState =
1259        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1260    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1261
1262    uint8_t  *hotPixelMode =
1263        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1264    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1265
1266    float  *lensAperture =
1267        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1268    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1269
1270    float  *filterDensity =
1271        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1272    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1273
1274    float  *focalLength =
1275        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1276    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1277
1278    float  *focusDistance =
1279        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1280    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1281
1282    float  *focusRange =
1283        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1284    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1285
1286    uint8_t  *opticalStab =
1287        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1288    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1289
1290    /*int32_t  *focusState =
1291      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1292      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1293
1294    uint8_t  *noiseRedMode =
1295        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1296    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1297
1298    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1299
1300    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1301        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1302    int32_t scalerCropRegion[4];
1303    scalerCropRegion[0] = hScalerCropRegion->left;
1304    scalerCropRegion[1] = hScalerCropRegion->top;
1305    scalerCropRegion[2] = hScalerCropRegion->width;
1306    scalerCropRegion[3] = hScalerCropRegion->height;
1307    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1308
1309    int64_t  *sensorExpTime =
1310        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1311    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1312
1313    int64_t  *sensorFrameDuration =
1314        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1315    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
1316
1317    int32_t  *sensorSensitivity =
1318        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1319    mMetadataResponse.iso_speed = *sensorSensitivity;
1320    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1321
1322    uint8_t  *shadingMode =
1323        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1324    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1325
1326    uint8_t  *faceDetectMode =
1327        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1328    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
1329
1330    uint8_t  *histogramMode =
1331        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1332    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1333
1334    uint8_t  *sharpnessMapMode =
1335        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1336    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1337            sharpnessMapMode, 1);
1338
1339    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1340    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1341        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1342    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1343            (int32_t*)sharpnessMap->sharpness,
1344            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1345
1346    resultMetadata = camMetadata.release();
1347    return resultMetadata;
1348}
1349
1350/*===========================================================================
1351 * FUNCTION   : convertToRegions
1352 *
1353 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1354 *
1355 * PARAMETERS :
1356 *   @rect   : cam_rect_t struct to convert
1357 *   @region : int32_t destination array
1358 *   @weight : if we are converting from cam_area_t, weight is valid
1359 *             else weight = -1
1360 *
1361 *==========================================================================*/
1362void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1363    region[0] = rect.left;
1364    region[1] = rect.top;
1365    region[2] = rect.left + rect.width;
1366    region[3] = rect.top + rect.height;
1367    if (weight > -1) {
1368        region[4] = weight;
1369    }
1370}
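/* Usage sketch (illustrative only, not part of the build): converting a
 * hypothetical metering area into the framework's
 * [x_min, y_min, x_max, y_max, weight] layout. The coordinate values are
 * made up; only the layout is the point.
 *
 *   cam_area_t area;
 *   area.rect.left = 100;  area.rect.top = 200;
 *   area.rect.width = 300; area.rect.height = 400;
 *   area.weight = 5;
 *   int32_t region[5];
 *   convertToRegions(area.rect, region, area.weight);
 *   // region now holds {100, 200, 400, 600, 5}
 */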
1371
1372/*===========================================================================
1373 * FUNCTION   : convertFromRegions
1374 *
1375 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
1376 *
1377 * PARAMETERS :
1378 *   @roi      : cam_area_t destination struct
1379 *   @settings : frame settings from framework
1380 *   @tag      : metadata tag of the region entry
1381 *              ([x_min, y_min, x_max, y_max, weight])
1382 *
1383 *==========================================================================*/
1384void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1385                                                   const camera_metadata_t *settings,
1386                                                   uint32_t tag){
1387    CameraMetadata frame_settings;
1388    frame_settings = settings;
1389    int32_t x_min = frame_settings.find(tag).data.i32[0];
1390    int32_t y_min = frame_settings.find(tag).data.i32[1];
1391    int32_t x_max = frame_settings.find(tag).data.i32[2];
1392    int32_t y_max = frame_settings.find(tag).data.i32[3];
1393    roi->weight = frame_settings.find(tag).data.i32[4];
1394    roi->rect.left = x_min;
1395    roi->rect.top = y_min;
1396    roi->rect.width = x_max - x_min;
1397    roi->rect.height = y_max - y_min;
1398}
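/* Usage sketch (illustrative only, not part of the build): the inverse
 * mapping, pulling a region entry out of the framework settings; the tag is
 * just one example of a 5-element region tag.
 *
 *   cam_area_t roi;
 *   convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
 *   // for framework data {100, 200, 400, 600, 5} this yields
 *   // roi.rect = {left 100, top 200, width 300, height 400}, roi.weight = 5
 */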
1399
1400/*===========================================================================
1401 * FUNCTION   : resetIfNeededROI
1402 *
1403 * DESCRIPTION: helper method to clamp the roi to the scaler crop region;
1404 *              returns false when the roi and crop region do not overlap
1405 *
1406 * PARAMETERS :
1407 *   @roi       : cam_area_t struct to resize
1408 *   @scalerCropRegion : cam_crop_region_t region to compare against
1409 *
1410 *
1411 *==========================================================================*/
1412bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1413                                                 const cam_crop_region_t* scalerCropRegion)
1414{
1415    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1416    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1417    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
1418    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
1419    if ((roi_x_max < scalerCropRegion->left) ||
1420        (roi_y_max < scalerCropRegion->top)  ||
1421        (roi->rect.left > crop_x_max) ||
1422        (roi->rect.top > crop_y_max)){
1423        return false;
1424    }
1425    if (roi->rect.left < scalerCropRegion->left) {
1426        roi->rect.left = scalerCropRegion->left;
1427    }
1428    if (roi->rect.top < scalerCropRegion->top) {
1429        roi->rect.top = scalerCropRegion->top;
1430    }
1431    if (roi_x_max > crop_x_max) {
1432        roi_x_max = crop_x_max;
1433    }
1434    if (roi_y_max > crop_y_max) {
1435        roi_y_max = crop_y_max;
1436    }
1437    roi->rect.width = roi_x_max - roi->rect.left;
1438    roi->rect.height = roi_y_max - roi->rect.top;
1439    return true;
1440}
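/* Usage sketch (illustrative only, not part of the build): clamping an ROI
 * that spills past the scaler crop region; the numbers are made up.
 *
 *   cam_area_t roi;
 *   roi.rect.left = 0;   roi.rect.top = 0;
 *   roi.rect.width = 500; roi.rect.height = 500;
 *   cam_crop_region_t crop;
 *   crop.left = 100; crop.top = 100; crop.width = 300; crop.height = 300;
 *   if (resetIfNeededROI(&roi, &crop)) {
 *       // roi.rect is now {100, 100, 300, 300}: the origin is raised to the
 *       // crop origin and the extent is shrunk to stay inside the crop
 *   }
 *   // false is returned only when the ROI and the crop do not overlap at all
 */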
1441
1442/*===========================================================================
1443 * FUNCTION   : convertLandmarks
1444 *
1445 * DESCRIPTION: helper method to extract the landmarks from face detection info
1446 *
1447 * PARAMETERS :
1448 *   @face   : cam_face_detection_info_t struct containing the detected face
1449 *   @landmarks : int32_t destination array
1450 *
1451 *
1452 *==========================================================================*/
1453void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1454{
1455    landmarks[0] = face.left_eye_center.x;
1456    landmarks[1] = face.left_eye_center.y;
1457    landmarks[2] = face.right_eye_center.x;
1458    landmarks[3] = face.right_eye_center.y;
1459    landmarks[4] = face.mouth_center.x;
1460    landmarks[5] = face.mouth_center.y;
1461}
1462
1463#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1464/*===========================================================================
1465 * FUNCTION   : initCapabilities
1466 *
1467 * DESCRIPTION: initialize camera capabilities in static data struct
1468 *
1469 * PARAMETERS :
1470 *   @cameraId  : camera Id
1471 *
1472 * RETURN     : int32_t type of status
1473 *              NO_ERROR  -- success
1474 *              non-zero failure code
1475 *==========================================================================*/
1476int QCamera3HardwareInterface::initCapabilities(int cameraId)
1477{
1478    int rc = 0;
1479    mm_camera_vtbl_t *cameraHandle = NULL;
1480    QCamera3HeapMemory *capabilityHeap = NULL;
1481
1482    cameraHandle = camera_open(cameraId);
1483    if (!cameraHandle) {
1484        ALOGE("%s: camera_open failed", __func__);
1485        rc = -1;
1486        goto open_failed;
1487    }
1488
1489    capabilityHeap = new QCamera3HeapMemory();
1490    if (capabilityHeap == NULL) {
1491        ALOGE("%s: creation of capabilityHeap failed", __func__);
1492        goto heap_creation_failed;
1493    }
1494    /* Allocate memory for capability buffer */
1495    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1496    if(rc != OK) {
1497        ALOGE("%s: No memory for cappability", __func__);
1498        goto allocate_failed;
1499    }
1500
1501    /* Map memory for capability buffer */
1502    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1503    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1504                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1505                                capabilityHeap->getFd(0),
1506                                sizeof(cam_capability_t));
1507    if(rc < 0) {
1508        ALOGE("%s: failed to map capability buffer", __func__);
1509        goto map_failed;
1510    }
1511
1512    /* Query Capability */
1513    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1514    if(rc < 0) {
1515        ALOGE("%s: failed to query capability",__func__);
1516        goto query_failed;
1517    }
1518    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1519    if (!gCamCapability[cameraId]) {
1520        ALOGE("%s: out of memory", __func__);
1521        goto query_failed;
1522    }
1523    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1524                                        sizeof(cam_capability_t));
1525    rc = 0;
1526
1527query_failed:
1528    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1529                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1530map_failed:
1531    capabilityHeap->deallocate();
1532allocate_failed:
1533    delete capabilityHeap;
1534heap_creation_failed:
1535    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1536    cameraHandle = NULL;
1537open_failed:
1538    return rc;
1539}
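/* Note on the cleanup ladder above: the goto labels unwind in strict reverse
 * order of acquisition (unmap -> deallocate -> delete -> close), so a failure
 * at any step releases exactly what was obtained before it. The success path
 * (rc == 0) intentionally falls through the same labels, since the capability
 * heap is only a staging buffer whose contents have already been copied into
 * gCamCapability[cameraId]. */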
1540
1541/*===========================================================================
1542 * FUNCTION   : initParameters
1543 *
1544 * DESCRIPTION: initialize camera parameters
1545 *
1546 * PARAMETERS :
1547 *
1548 * RETURN     : int32_t type of status
1549 *              NO_ERROR  -- success
1550 *              non-zero failure code
1551 *==========================================================================*/
1552int QCamera3HardwareInterface::initParameters()
1553{
1554    int rc = 0;
1555
1556    //Allocate Set Param Buffer
1557    mParamHeap = new QCamera3HeapMemory();
1558    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1559    if(rc != OK) {
1560        rc = NO_MEMORY;
1561        ALOGE("Failed to allocate SETPARM Heap memory");
1562        delete mParamHeap;
1563        mParamHeap = NULL;
1564        return rc;
1565    }
1566
1567    //Map memory for parameters buffer
1568    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1569            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1570            mParamHeap->getFd(0),
1571            sizeof(parm_buffer_t));
1572    if(rc < 0) {
1573        ALOGE("%s:failed to map SETPARM buffer",__func__);
1574        rc = FAILED_TRANSACTION;
1575        mParamHeap->deallocate();
1576        delete mParamHeap;
1577        mParamHeap = NULL;
1578        return rc;
1579    }
1580
1581    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1582    return rc;
1583}
1584
1585/*===========================================================================
1586 * FUNCTION   : deinitParameters
1587 *
1588 * DESCRIPTION: de-initialize camera parameters
1589 *
1590 * PARAMETERS :
1591 *
1592 * RETURN     : NONE
1593 *==========================================================================*/
1594void QCamera3HardwareInterface::deinitParameters()
1595{
1596    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1597            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1598
1599    mParamHeap->deallocate();
1600    delete mParamHeap;
1601    mParamHeap = NULL;
1602
1603    mParameters = NULL;
1604}
1605
1606/*===========================================================================
1607 * FUNCTION   : calcMaxJpegSize
1608 *
1609 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1610 *
1611 * PARAMETERS :
1612 *
1613 * RETURN     : max_jpeg_size
1614 *==========================================================================*/
1615int QCamera3HardwareInterface::calcMaxJpegSize()
1616{
1617    int32_t max_jpeg_size = 0;
1618    int temp_width, temp_height;
1619    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1620        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1621        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1622        if (temp_width * temp_height > max_jpeg_size ) {
1623            max_jpeg_size = temp_width * temp_height;
1624        }
1625    }
1626    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1627    return max_jpeg_size;
1628}
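/* Worked example (made-up sensor dimensions, for illustration only): if the
 * largest picture size were 4208x3120, the loop would pick
 * 4208 * 3120 = 13,128,960 pixels, and the returned buffer size would be
 * 13,128,960 * 3/2 + sizeof(camera3_jpeg_blob_t) bytes, i.e. roughly 19.7 MB
 * plus the small blob trailer the framework expects at the end of every
 * BLOB-format buffer. */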
1629
1630/*===========================================================================
1631 * FUNCTION   : initStaticMetadata
1632 *
1633 * DESCRIPTION: initialize the static metadata
1634 *
1635 * PARAMETERS :
1636 *   @cameraId  : camera Id
1637 *
1638 * RETURN     : int32_t type of status
1639 *              0  -- success
1640 *              non-zero failure code
1641 *==========================================================================*/
1642int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1643{
1644    int rc = 0;
1645    CameraMetadata staticInfo;
1646    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1647    /*HAL 3 only*/
1648    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1649                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1650
1651    /*hard coded for now but this should come from sensor*/
1652    float min_focus_distance;
1653    if(facingBack){
1654        min_focus_distance = 10;
1655    } else {
1656        min_focus_distance = 0;
1657    }
1658    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1659                    &min_focus_distance, 1);
1660
1661    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1662                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1663
1664    /*should be using focal lengths but sensor doesn't provide that info now*/
1665    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1666                      &gCamCapability[cameraId]->focal_length,
1667                      1);
1668
1669    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1670                      gCamCapability[cameraId]->apertures,
1671                      gCamCapability[cameraId]->apertures_count);
1672
1673    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1674                gCamCapability[cameraId]->filter_densities,
1675                gCamCapability[cameraId]->filter_densities_count);
1676
1677
1678    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1679                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1680                      gCamCapability[cameraId]->optical_stab_modes_count);
1681
1682    staticInfo.update(ANDROID_LENS_POSITION,
1683                      gCamCapability[cameraId]->lens_position,
1684                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1685
1686    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1687                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1688    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1689                      lens_shading_map_size,
1690                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1691
1692    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map,
1693            sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float));
1694
1695    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1696                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1697    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1698            geo_correction_map_size,
1699            sizeof(geo_correction_map_size)/sizeof(int32_t));
1700
1701    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1702                       gCamCapability[cameraId]->geo_correction_map,
1703                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1704
1705    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1706            gCamCapability[cameraId]->sensor_physical_size, 2);
1707
1708    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1709            gCamCapability[cameraId]->exposure_time_range, 2);
1710
1711    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1712            &gCamCapability[cameraId]->max_frame_duration, 1);
1713
1714
1715    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1716                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1717
1718    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1719                                               gCamCapability[cameraId]->pixel_array_size.height};
1720    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1721                      pixel_array_size, 2);
1722
1723    int32_t active_array_size[] = {0, 0,
1724                                                gCamCapability[cameraId]->active_array_size.width,
1725                                                gCamCapability[cameraId]->active_array_size.height};
1726
1727    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1728                      active_array_size, 4);
1729
1730    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1731            &gCamCapability[cameraId]->white_level, 1);
1732
1733    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1734            gCamCapability[cameraId]->black_level_pattern, 4);
1735
1736    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1737                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1738
1739    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1740                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1741
1742    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1743                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1744    /*hardcode 0 for now*/
1745    int32_t max_face_count = 0;
1746    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1747                      &max_face_count, 1);
1748
1749    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1750                      &gCamCapability[cameraId]->histogram_size, 1);
1751
1752    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1753            &gCamCapability[cameraId]->max_histogram_count, 1);
1754
1755    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1756                                                gCamCapability[cameraId]->sharpness_map_size.height};
1757
1758    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1759            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1760
1761    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1762            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1763
1764
1765    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1766                      &gCamCapability[cameraId]->raw_min_duration,
1767                       1);
1768
1769    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
1770                                                HAL_PIXEL_FORMAT_BLOB};
1771    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
1772    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1773                      scalar_formats,
1774                      scalar_formats_count);
1775
1776    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1777    makeTable(gCamCapability[cameraId]->supported_sizes_tbl,
1778              gCamCapability[cameraId]->supported_sizes_tbl_cnt,
1779              available_processed_sizes);
1780    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1781                available_processed_sizes,
1782                (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2);
1783
1784    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1785    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1786                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1787                 available_fps_ranges);
1788    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1789            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1790
1791    camera_metadata_rational exposureCompensationStep = {
1792            gCamCapability[cameraId]->exp_compensation_step.numerator,
1793            gCamCapability[cameraId]->exp_compensation_step.denominator};
1794    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1795                      &exposureCompensationStep, 1);
1796
1797    /*TO DO*/
1798    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1799    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1800                      availableVstabModes, sizeof(availableVstabModes));
1801
1802    /*HAL 1 and HAL 3 common*/
1803    float maxZoom = 4;
1804    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1805            &maxZoom, 1);
1806
1807    int32_t max3aRegions = 1;
1808    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1809            &max3aRegions, 1);
1810
1811    uint8_t availableFaceDetectModes[] = {
1812            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1813    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1814                      availableFaceDetectModes,
1815                      sizeof(availableFaceDetectModes));
1816
1817    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1818                                       gCamCapability[cameraId]->raw_dim.height};
1819    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1820                      raw_size,
1821                      sizeof(raw_size)/sizeof(uint32_t));
1822
1823    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1824                                                        gCamCapability[cameraId]->exposure_compensation_max};
1825    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1826            exposureCompensationRange,
1827            sizeof(exposureCompensationRange)/sizeof(int32_t));
1828
1829    uint8_t lensFacing = (facingBack) ?
1830            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1831    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1832
1833    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
1834    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1835              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1836              available_jpeg_sizes);
1837    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1838                available_jpeg_sizes,
1839                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1840
1841    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1842                      available_thumbnail_sizes,
1843                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1844
1845    int32_t max_jpeg_size = 0;
1846    int temp_width, temp_height;
1847    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1848        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1849        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1850        if (temp_width * temp_height > max_jpeg_size ) {
1851            max_jpeg_size = temp_width * temp_height;
1852        }
1853    }
1854    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1855    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1856                      &max_jpeg_size, 1);
1857
1858    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1859    int32_t size = 0;
1860    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1861        int val = lookupFwkName(EFFECT_MODES_MAP,
1862                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1863                                   gCamCapability[cameraId]->supported_effects[i]);
1864        if (val != NAME_NOT_FOUND) {
1865            avail_effects[size] = (uint8_t)val;
1866            size++;
1867        }
1868    }
1869    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1870                      avail_effects,
1871                      size);
1872
1873    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1874    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1875    int32_t supported_scene_modes_cnt = 0;
1876    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1877        int val = lookupFwkName(SCENE_MODES_MAP,
1878                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1879                                gCamCapability[cameraId]->supported_scene_modes[i]);
1880        if (val != NAME_NOT_FOUND) {
1881            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1882            supported_indexes[supported_scene_modes_cnt] = i;
1883            supported_scene_modes_cnt++;
1884        }
1885    }
1886
1887    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1888                      avail_scene_modes,
1889                      supported_scene_modes_cnt);
1890
1891    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1892    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1893                      supported_scene_modes_cnt,
1894                      scene_mode_overrides,
1895                      supported_indexes,
1896                      cameraId);
1897    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
1898                      scene_mode_overrides,
1899                      supported_scene_modes_cnt*3);
1900
1901    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
1902    size = 0;
1903    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
1904        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
1905                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
1906                                 gCamCapability[cameraId]->supported_antibandings[i]);
1907        if (val != NAME_NOT_FOUND) {
1908            avail_antibanding_modes[size] = (uint8_t)val;
1909            size++;
1910        }
1911
1912    }
1913    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1914                      avail_antibanding_modes,
1915                      size);
1916
1917    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
1918    size = 0;
1919    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
1920        int val = lookupFwkName(FOCUS_MODES_MAP,
1921                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
1922                                gCamCapability[cameraId]->supported_focus_modes[i]);
1923        if (val != NAME_NOT_FOUND) {
1924            avail_af_modes[size] = (uint8_t)val;
1925            size++;
1926        }
1927    }
1928    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1929                      avail_af_modes,
1930                      size);
1931
1932    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
1933    size = 0;
1934    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
1935        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1936                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1937                                    gCamCapability[cameraId]->supported_white_balances[i]);
1938        if (val != NAME_NOT_FOUND) {
1939            avail_awb_modes[size] = (uint8_t)val;
1940            size++;
1941        }
1942    }
1943    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1944                      avail_awb_modes,
1945                      size);
1946
1947    uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX];
1948    size = 0;
1949    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) {
1950        int val = lookupFwkName(FLASH_MODES_MAP,
1951                                sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
1952                                gCamCapability[cameraId]->supported_flash_modes[i]);
1953        if (val != NAME_NOT_FOUND) {
1954            avail_flash_modes[size] = (uint8_t)val;
1955            size++;
1956        }
1957    }
1958    uint8_t flashAvailable = 0;
1959    if (size > 1) {
1960        //flash is supported
1961        flashAvailable = 1;
1962    }
1963    staticInfo.update(ANDROID_FLASH_MODE,
1964                      avail_flash_modes,
1965                      size);
1966
1967    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
1968            &flashAvailable, 1);
1969
1970    uint8_t avail_ae_modes[5];
1971    size = 0;
1972    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
1973        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
1974        size++;
1975    }
1976    if (flashAvailable) {
1977        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
1978        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
1979        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
1980    }
1981    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1982                      avail_ae_modes,
1983                      size);
1984    size = 0;
1985    int32_t avail_sensitivities[CAM_ISO_MODE_MAX];
1986    for (int i = 0; i < gCamCapability[cameraId]->supported_iso_modes_cnt; i++) {
1987        int32_t sensitivity = getSensorSensitivity(gCamCapability[cameraId]->supported_iso_modes[i]);
1988        if (sensitivity != -1) {
1989            avail_sensitivities[size] = sensitivity;
1990            size++;
1991        }
1992    }
1993    staticInfo.update(ANDROID_SENSOR_INFO_AVAILABLE_SENSITIVITIES,
1994                      avail_sensitivities,
1995                      size);
1996
1997    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
1998                      &gCamCapability[cameraId]->max_analog_sensitivity,
1999                      1); /* entry count, not byte size */
2000    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2001                      &gCamCapability[cameraId]->processed_min_duration,
2002                      1);
2003    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2004                      &gCamCapability[cameraId]->jpeg_min_duration,
2005                      1);
2006
2007    gStaticMetadata[cameraId] = staticInfo.release();
2008    return rc;
2009}
2010
2011/*===========================================================================
2012 * FUNCTION   : makeTable
2013 *
2014 * DESCRIPTION: make a table of sizes
2015 *
2016 * PARAMETERS :
2017 *
2018 *
2019 *==========================================================================*/
2020void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2021                                          int32_t* sizeTable)
2022{
2023    int j = 0;
2024    for (int i = 0; i < size; i++) {
2025        sizeTable[j] = dimTable[i].width;
2026        sizeTable[j+1] = dimTable[i].height;
2027        j+=2;
2028    }
2029}
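/* Usage sketch (illustrative only, not part of the build): the dimension
 * table is flattened into the width/height pairs the framework tags expect.
 *
 *   cam_dimension_t dims[2];
 *   dims[0].width = 1920; dims[0].height = 1080;
 *   dims[1].width = 1280; dims[1].height = 720;
 *   int32_t flat[4];
 *   makeTable(dims, 2, flat);
 *   // flat == {1920, 1080, 1280, 720}
 */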
2030
2031/*===========================================================================
2032 * FUNCTION   : makeFPSTable
2033 *
2034 * DESCRIPTION: make a table of fps ranges
2035 *
2036 * PARAMETERS :
2037 *
2038 *==========================================================================*/
2039void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2040                                          int32_t* fpsRangesTable)
2041{
2042    int j = 0;
2043    for (int i = 0; i < size; i++) {
2044        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2045        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2046        j+=2;
2047    }
2048}
2049
2050/*===========================================================================
2051 * FUNCTION   : makeOverridesList
2052 *
2053 * DESCRIPTION: make a list of scene mode overrides
2054 *
2055 * PARAMETERS :
2056 *
2057 *
2058 *==========================================================================*/
2059void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2060                                                  uint8_t size, uint8_t* overridesList,
2061                                                  uint8_t* supported_indexes,
2062                                                  int camera_id)
2063{
2064    /*daemon will give a list of overrides for all scene modes.
2065      However we should send the fwk only the overrides for the scene modes
2066      supported by the framework*/
2067    int j = 0, index = 0, supt = 0;
2068    uint8_t focus_override;
2069    for (int i = 0; i < size; i++) {
2070        supt = 0;
2071        index = supported_indexes[i];
2072        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
2073        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2074                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2075                                                    overridesTable[index].awb_mode);
2076        focus_override = (uint8_t)overridesTable[index].af_mode;
2077        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2078           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2079              supt = 1;
2080              break;
2081           }
2082        }
2083        if (supt) {
2084           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2085                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2086                                              focus_override);
2087        } else {
2088           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2089        }
2090        j+=3;
2091    }
2092}
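/* Layout note: each supported scene mode contributes three consecutive bytes
 * to overridesList -- the AE mode, the AWB mode (translated to the framework
 * enum), and the AF mode (translated when the sensor supports the override,
 * otherwise ANDROID_CONTROL_AF_MODE_OFF) -- which is the triplet format
 * expected by ANDROID_CONTROL_SCENE_MODE_OVERRIDES. */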
2093
2094/*===========================================================================
2095 * FUNCTION   : getScalarFormat
2096 *
2097 * DESCRIPTION: convert the format to type recognized by framework
2098 *
2099 * PARAMETERS : format : the format from backend
2100 *
2101 * RETURN     : format recognized by framework
2102 *
2103 *==========================================================================*/
2104int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2105{
2106    int32_t halPixelFormat;
2107
2108    switch (format) {
2109    case CAM_FORMAT_YUV_420_NV12:
2110        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2111        break;
2112    case CAM_FORMAT_YUV_420_NV21:
2113        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2114        break;
2115    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2116        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2117        break;
2118    case CAM_FORMAT_YUV_420_YV12:
2119        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2120        break;
2121    case CAM_FORMAT_YUV_422_NV16:
2122    case CAM_FORMAT_YUV_422_NV61:
2123    default:
2124        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2125        break;
2126    }
2127    return halPixelFormat;
2128}
2129
2130/*===========================================================================
2131 * FUNCTION   : getSensorSensitivity
2132 *
2133 * DESCRIPTION: convert iso_mode to an integer value
2134 *
2135 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2136 *
2137 * RETURN     : sensitivity supported by sensor
2138 *
2139 *==========================================================================*/
2140int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2141{
2142    int32_t sensitivity;
2143
2144    switch (iso_mode) {
2145    case CAM_ISO_MODE_100:
2146        sensitivity = 100;
2147        break;
2148    case CAM_ISO_MODE_200:
2149        sensitivity = 200;
2150        break;
2151    case CAM_ISO_MODE_400:
2152        sensitivity = 400;
2153        break;
2154    case CAM_ISO_MODE_800:
2155        sensitivity = 800;
2156        break;
2157    case CAM_ISO_MODE_1600:
2158        sensitivity = 1600;
2159        break;
2160    default:
2161        sensitivity = -1;
2162        break;
2163    }
2164    return sensitivity;
2165}
2166
2167
2168/*===========================================================================
2169 * FUNCTION   : AddSetParmEntryToBatch
2170 *
2171 * DESCRIPTION: add set parameter entry into batch
2172 *
2173 * PARAMETERS :
2174 *   @p_table     : ptr to parameter buffer
2175 *   @paramType   : parameter type
2176 *   @paramLength : length of parameter value
2177 *   @paramValue  : ptr to parameter value
2178 *
2179 * RETURN     : int32_t type of status
2180 *              NO_ERROR  -- success
2181 *              non-zero failure code
2182 *==========================================================================*/
2183int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2184                                                          cam_intf_parm_type_t paramType,
2185                                                          uint32_t paramLength,
2186                                                          void *paramValue)
2187{
2188    int position = paramType;
2189    int current, next;
2190
2191    /*************************************************************************
2192    *                 Code to take care of linking next flags                *
2193    *************************************************************************/
2194    current = GET_FIRST_PARAM_ID(p_table);
2195    if (position == current){
2196        //DO NOTHING
2197    } else if (position < current){
2198        SET_NEXT_PARAM_ID(position, p_table, current);
2199        SET_FIRST_PARAM_ID(p_table, position);
2200    } else {
2201        /* Search for the position in the linked list where we need to slot in*/
2202        while (position > GET_NEXT_PARAM_ID(current, p_table))
2203            current = GET_NEXT_PARAM_ID(current, p_table);
2204
2205        /*If node already exists no need to alter linking*/
2206        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2207            next = GET_NEXT_PARAM_ID(current, p_table);
2208            SET_NEXT_PARAM_ID(current, p_table, position);
2209            SET_NEXT_PARAM_ID(position, p_table, next);
2210        }
2211    }
2212
2213    /*************************************************************************
2214    *                   Copy contents into entry                             *
2215    *************************************************************************/
2216
2217    if (paramLength > sizeof(parm_type_t)) {
2218        ALOGE("%s:Size of input larger than max entry size",__func__);
2219        return BAD_VALUE;
2220    }
2221    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2222    return NO_ERROR;
2223}
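/* Consumer-side sketch (illustrative only, not part of the build), assuming
 * the usual GET_FIRST_PARAM_ID/GET_NEXT_PARAM_ID semantics: the batch is a
 * sparse table threaded by a sorted, singly linked list of the entries that
 * were actually set, so a reader only needs to visit the flagged slots.
 *
 *   int current = GET_FIRST_PARAM_ID(p_table);
 *   while (current != CAM_INTF_PARM_MAX) {
 *       void *value = POINTER_OF(current, p_table);
 *       // ... apply parameter 'current' using 'value' ...
 *       current = GET_NEXT_PARAM_ID(current, p_table);
 *   }
 *
 * CAM_INTF_PARM_MAX acts as the terminator because setFrameParameters resets
 * first_flagged_entry to that value before repopulating the batch. */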
2224
2225/*===========================================================================
2226 * FUNCTION   : lookupFwkName
2227 *
2228 * DESCRIPTION: In case the enum is not same in fwk and backend
2229 *              make sure the parameter is correctly propagated
2230 *
2231 * PARAMETERS  :
2232 *   @arr      : map between the two enums
2233 *   @len      : len of the map
2234 *   @hal_name : name of the hal_parm to map
2235 *
2236 * RETURN     : int type of status
2237 *              fwk_name  -- success
2238 *              non-zero failure code
2239 *==========================================================================*/
2240int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2241                                             int len, int hal_name)
2242{
2243
2244    for (int i = 0; i < len; i++) {
2245        if (arr[i].hal_name == hal_name)
2246            return arr[i].fwk_name;
2247    }
2248
2249    /* Not able to find matching framework type is not necessarily
2250     * an error case. This happens when mm-camera supports more attributes
2251     * than the frameworks do */
2252    ALOGD("%s: Cannot find matching framework type", __func__);
2253    return NAME_NOT_FOUND;
2254}
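/* Usage sketch (illustrative only, not part of the build): the caller passes
 * the element count of the map and handles the NAME_NOT_FOUND case, as the
 * loops in initStaticMetadata above do.
 *
 *   int val = lookupFwkName(EFFECT_MODES_MAP,
 *                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
 *                   gCamCapability[cameraId]->supported_effects[0]);
 *   if (val != NAME_NOT_FOUND) {
 *       uint8_t fwkEffect = (uint8_t)val;  // framework enum for this effect
 *   }
 */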
2255
2256/*===========================================================================
2257 * FUNCTION   : lookupHalName
2258 *
2259 * DESCRIPTION: In case the enum is not same in fwk and backend
2260 *              make sure the parameter is correctly propagated
2261 *
2262 * PARAMETERS  :
2263 *   @arr      : map between the two enums
2264 *   @len      : len of the map
2265 *   @fwk_name : name of the fwk_parm to map
2266 *
2267 * RETURN     : int32_t type of status
2268 *              hal_name  -- success
2269 *              non-zero failure code
2270 *==========================================================================*/
2271int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2272                                             int len, int fwk_name)
2273{
2274    for (int i = 0; i < len; i++) {
2275       if (arr[i].fwk_name == fwk_name)
2276           return arr[i].hal_name;
2277    }
2278    ALOGE("%s: Cannot find matching hal type", __func__);
2279    return NAME_NOT_FOUND;
2280}
2281
2282/*===========================================================================
2283 * FUNCTION   : getCamInfo
2284 *
2285 * DESCRIPTION: query camera capabilities
2286 *
2287 * PARAMETERS :
2288 *   @cameraId  : camera Id
2289 *   @info      : camera info struct to be filled in with camera capabilities
2290 *
2291 * RETURN     : int32_t type of status
2292 *              NO_ERROR  -- success
2293 *              non-zero failure code
2294 *==========================================================================*/
2295int QCamera3HardwareInterface::getCamInfo(int cameraId,
2296                                    struct camera_info *info)
2297{
2298    int rc = 0;
2299
2300    if (NULL == gCamCapability[cameraId]) {
2301        rc = initCapabilities(cameraId);
2302        if (rc < 0) {
2303            //pthread_mutex_unlock(&g_camlock);
2304            return rc;
2305        }
2306    }
2307
2308    if (NULL == gStaticMetadata[cameraId]) {
2309        rc = initStaticMetadata(cameraId);
2310        if (rc < 0) {
2311            return rc;
2312        }
2313    }
2314
2315    switch(gCamCapability[cameraId]->position) {
2316    case CAM_POSITION_BACK:
2317        info->facing = CAMERA_FACING_BACK;
2318        break;
2319
2320    case CAM_POSITION_FRONT:
2321        info->facing = CAMERA_FACING_FRONT;
2322        break;
2323
2324    default:
2325        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2326        rc = -1;
2327        break;
2328    }
2329
2330
2331    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2332    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2333    info->static_camera_characteristics = gStaticMetadata[cameraId];
2334
2335    return rc;
2336}
2337
2338/*===========================================================================
2339 * FUNCTION   : translateCapabilityToMetadata
2340 *
2341 * DESCRIPTION: construct default request settings (camera_metadata_t) per template type
2342 *
2343 * PARAMETERS : type of the request
2344 *
2345 *
2346 * RETURN     : success: camera_metadata_t*
2347 *              failure: NULL
2348 *
2349 *==========================================================================*/
2350camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2351{
2352    pthread_mutex_lock(&mMutex);
2353
2354    if (mDefaultMetadata[type] != NULL) {
2355        pthread_mutex_unlock(&mMutex);
2356        return mDefaultMetadata[type];
2357    }
2358    //first time we are handling this request
2359    //fill up the metadata structure using the wrapper class
2360    CameraMetadata settings;
2361    //translate from cam_capability_t to camera_metadata_tag_t
2362    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2363    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2364
2365    /*control*/
2366
2367    uint8_t controlIntent = 0;
2368    switch (type) {
2369      case CAMERA3_TEMPLATE_PREVIEW:
2370        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2371        break;
2372      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2373        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2374        break;
2375      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2376        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2377        break;
2378      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2379        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2380        break;
2381      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2382        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2383        break;
2384      default:
2385        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2386        break;
2387    }
2388    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2389
2390    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2391            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2392
2393    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2394    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2395
2396    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2397    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2398
2399    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2400    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2401
2402    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2403    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2404
2405    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2406    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2407
2408    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2409    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2410
2411    static uint8_t focusMode;
2412    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2413        ALOGE("%s: Setting focus mode to auto", __func__);
2414        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2415    } else {
2416        ALOGE("%s: Setting focus mode to off", __func__);
2417        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2418    }
2419    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2420
2421    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2422    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2423
2424    /*flash*/
2425    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2426    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2427
2428
2429    /* lens */
2430    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2431    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2432
2433    if (gCamCapability[mCameraId]->filter_densities_count) {
2434        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2435        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2436                        gCamCapability[mCameraId]->filter_densities_count);
2437    }
2438
2439    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2440    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2441
2442    mDefaultMetadata[type] = settings.release();
2443
2444    pthread_mutex_unlock(&mMutex);
2445    return mDefaultMetadata[type];
2446}
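/* Usage sketch (illustrative only, not part of the build): the framework asks
 * for one default request per template type and the result is cached under
 * mMutex, so repeated calls for the same type return the same pointer.
 * 'hw' here is a hypothetical QCamera3HardwareInterface instance.
 *
 *   camera_metadata_t *preview =
 *       hw->translateCapabilityToMetadata(CAMERA3_TEMPLATE_PREVIEW);
 *   camera_metadata_t *again =
 *       hw->translateCapabilityToMetadata(CAMERA3_TEMPLATE_PREVIEW);
 *   // 'again' == 'preview': the cached mDefaultMetadata[type] entry
 */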
2447
2448/*===========================================================================
2449 * FUNCTION   : setFrameParameters
2450 *
2451 * DESCRIPTION: set parameters per frame as requested in the metadata from
2452 *              framework
2453 *
2454 * PARAMETERS :
2455 *   @settings  : frame settings information from framework
2456 *
2457 *
2458 * RETURN     : success: NO_ERROR
2459 *              failure: non-zero error code (e.g. BAD_VALUE)
2460 *==========================================================================*/
2461int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2462                                                  const camera_metadata_t *settings)
2463{
2464    /*translate from camera_metadata_t type to parm_type_t*/
2465    int rc = 0;
2466    if (settings == NULL && mFirstRequest) {
2467        /*settings cannot be null for the first request*/
2468        return BAD_VALUE;
2469    }
2470
2471    int32_t hal_version = CAM_HAL_V3;
2472
2473    memset(mParameters, 0, sizeof(parm_buffer_t));
2474    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2475    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2476                sizeof(hal_version), &hal_version);
2477
2478    /*we need to update the frame number in the parameters*/
2479    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2480                                sizeof(frame_id), &frame_id);
2481    if (rc < 0) {
2482        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2483        return BAD_VALUE;
2484    }
2485
2486    if(settings != NULL){
2487        rc = translateMetadataToParameters(settings);
2488    }
2489    /*set the parameters to backend*/
2490    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2491    return rc;
2492}
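/* Per-request flow in short: the parameter batch is cleared and re-flagged
 * from scratch for every capture request, the HAL version and frame number
 * are always added, the framework settings (when present) are translated by
 * translateMetadataToParameters(), and the whole batch is pushed to the
 * backend with a single set_parms() call. */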
2493
2494/*===========================================================================
2495 * FUNCTION   : translateMetadataToParameters
2496 *
2497 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2498 *
2499 *
2500 * PARAMETERS :
2501 *   @settings  : frame settings information from framework
2502 *
2503 *
2504 * RETURN     : success: NO_ERROR
2505 *              failure: non-zero error code
2506 *==========================================================================*/
2507int QCamera3HardwareInterface::translateMetadataToParameters
2508                                  (const camera_metadata_t *settings)
2509{
2510    int rc = 0;
2511    CameraMetadata frame_settings;
2512    frame_settings = settings;
2513
2514
2515    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2516        int32_t antibandingMode =
2517            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2518        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2519                sizeof(antibandingMode), &antibandingMode);
2520    }
2521
2522    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2523        int32_t expCompensation = frame_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2524        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2525          sizeof(expCompensation), &expCompensation);
2526    }
2527
2528    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2529        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2530        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2531                sizeof(aeLock), &aeLock);
2532    }
2533
2534    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2535        cam_fps_range_t fps_range;
2536        fps_range.min_fps =
2537            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2538        fps_range.max_fps =
2539            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2540        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2541                sizeof(fps_range), &fps_range);
2542    }
2543
2544    float focalDistance = -1.0;
2545    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2546        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2547        rc = AddSetParmEntryToBatch(mParameters,
2548                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2549                sizeof(focalDistance), &focalDistance);
2550    }
2551
2552    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2553        uint8_t fwk_focusMode =
2554            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2555        uint8_t focusMode;
2556        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2557            focusMode = CAM_FOCUS_MODE_INFINITY;
2558        } else {
2559            focusMode = lookupHalName(FOCUS_MODES_MAP,
2560                                      sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2561                                      fwk_focusMode);
2562        }
2563        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2564                sizeof(focusMode), &focusMode);
2565    }
2566
2567    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2568        uint8_t awbLock =
2569            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2570        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2571                sizeof(awbLock), &awbLock);
2572    }
2573
2574    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2575        uint8_t fwk_whiteLevel =
2576            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2577        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2578                sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2579                fwk_whiteLevel);
2580        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2581                sizeof(whiteLevel), &whiteLevel);
2582    }
2583
2584    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2585        uint8_t fwk_effectMode =
2586            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2587        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2588                sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2589                fwk_effectMode);
2590        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2591                sizeof(effectMode), &effectMode);
2592    }
2593
2594    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2595        uint8_t fwk_aeMode =
2596            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2597        uint8_t aeMode;
2598        int32_t redeye;
2599        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2600            aeMode = CAM_AE_MODE_OFF;
2601        } else {
2602            aeMode = CAM_AE_MODE_ON;
2603        }
2604        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2605            redeye = 1;
2606        } else {
2607            redeye = 0;
2608        }
2609        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2610                                          sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
2611                                          fwk_aeMode);
2612        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2613                sizeof(aeMode), &aeMode);
2614        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2615                sizeof(flashMode), &flashMode);
2616        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2617                sizeof(redeye), &redeye);
2618    }
2619
2620    if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) {
2621        int32_t metaFrameNumber =
2622            frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0];
2623        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2624                sizeof(metaFrameNumber), &metaFrameNumber);
2625    }
2626
2627    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2628        uint8_t colorCorrectMode =
2629            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2630        rc =
2631            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2632                    sizeof(colorCorrectMode), &colorCorrectMode);
2633    }
2634    cam_trigger_t aecTrigger;
2635    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2636    aecTrigger.trigger_id = -1;
2637    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2638        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2639        aecTrigger.trigger =
2640            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2641        aecTrigger.trigger_id =
2642            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2643    }
2644    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2645                                sizeof(aecTrigger), &aecTrigger);
2646
2647    /*af_trigger must come with a trigger id*/
2648    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2649        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2650        cam_trigger_t af_trigger;
2651        af_trigger.trigger =
2652            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2653        af_trigger.trigger_id =
2654            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2655        rc = AddSetParmEntryToBatch(mParameters,
2656                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2657    }
2658
2659    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2660        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2661        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2662                sizeof(metaMode), &metaMode);
2663        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2664           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2665           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2666                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2667                                             fwk_sceneMode);
2668           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2669                sizeof(sceneMode), &sceneMode);
2670        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
2671           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2672           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2673                sizeof(sceneMode), &sceneMode);
2674        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
2675           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2676           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2677                sizeof(sceneMode), &sceneMode);
2678        }
2679    }
2680
2681    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2682        int32_t demosaic =
2683            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2684        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2685                sizeof(demosaic), &demosaic);
2686    }
2687
2688    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2689        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2690        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE,
2691                sizeof(edgeMode), &edgeMode);
2692    }
2693
2694    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2695        int32_t edgeStrength =
2696            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2697        rc = AddSetParmEntryToBatch(mParameters,
2698                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2699    }
2700
2701    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2702        uint8_t flashMode =
2703            frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2704        rc = AddSetParmEntryToBatch(mParameters,
2705                CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode);
2706    }
2707
2708    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2709        uint8_t flashPower =
2710            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2711        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2712                sizeof(flashPower), &flashPower);
2713    }
2714
2715    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2716        int64_t flashFiringTime =
2717            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2718        rc = AddSetParmEntryToBatch(mParameters,
2719                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2720    }
2721
2722    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2723        uint8_t geometricMode =
2724            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2725        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2726                sizeof(geometricMode), &geometricMode);
2727    }
2728
2729    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2730        uint8_t geometricStrength =
2731            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2732        rc = AddSetParmEntryToBatch(mParameters,
2733                CAM_INTF_META_GEOMETRIC_STRENGTH,
2734                sizeof(geometricStrength), &geometricStrength);
2735    }
2736
2737    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2738        uint8_t hotPixelMode =
2739            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2740        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2741                sizeof(hotPixelMode), &hotPixelMode);
2742    }
2743
2744    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2745        float lensAperture =
2746            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2747        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2748                sizeof(lensAperture), &lensAperture);
2749    }
2750
2751    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2752        float filterDensity =
2753            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2754        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2755                sizeof(filterDensity), &filterDensity);
2756    }
2757
2758    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2759        float focalLength =
2760            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2761        rc = AddSetParmEntryToBatch(mParameters,
2762                CAM_INTF_META_LENS_FOCAL_LENGTH,
2763                sizeof(focalLength), &focalLength);
2764    }
2765
2766    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2767        uint8_t optStabMode =
2768            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2769        rc = AddSetParmEntryToBatch(mParameters,
2770                CAM_INTF_META_LENS_OPT_STAB_MODE,
2771                sizeof(optStabMode), &optStabMode);
2772    }
2773
2774    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2775        uint8_t noiseRedMode =
2776            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2777        rc = AddSetParmEntryToBatch(mParameters,
2778                CAM_INTF_META_NOISE_REDUCTION_MODE,
2779                sizeof(noiseRedMode), &noiseRedMode);
2780    }
2781
2782    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2783        uint8_t noiseRedStrength =
2784            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2785        rc = AddSetParmEntryToBatch(mParameters,
2786                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2787                sizeof(noiseRedStrength), &noiseRedStrength);
2788    }
2789
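    /* The scaler crop region is cached locally so that any 3A regions later in
     * this request can be checked against it. */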
2790    cam_crop_region_t scalerCropRegion;
2791    bool scalerCropSet = false;
2792    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2793        scalerCropRegion.left =
2794            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2795        scalerCropRegion.top =
2796            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2797        scalerCropRegion.width =
2798            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2799        scalerCropRegion.height =
2800            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2801        rc = AddSetParmEntryToBatch(mParameters,
2802                CAM_INTF_META_SCALER_CROP_REGION,
2803                sizeof(scalerCropRegion), &scalerCropRegion);
2804        scalerCropSet = true;
2805    }
2806
2807    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2808        int64_t sensorExpTime =
2809            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2810        rc = AddSetParmEntryToBatch(mParameters,
2811                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2812                sizeof(sensorExpTime), &sensorExpTime);
2813    }
2814
2815    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2816        int64_t sensorFrameDuration =
2817            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2818        rc = AddSetParmEntryToBatch(mParameters,
2819                CAM_INTF_META_SENSOR_FRAME_DURATION,
2820                sizeof(sensorFrameDuration), &sensorFrameDuration);
2821    }
2822
2823    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2824        int32_t sensorSensitivity =
2825            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2826        rc = AddSetParmEntryToBatch(mParameters,
2827                CAM_INTF_META_SENSOR_SENSITIVITY,
2828                sizeof(sensorSensitivity), &sensorSensitivity);
2829    }
2830
2831    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
2832        int32_t shadingMode =
2833            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
2834        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
2835                sizeof(shadingMode), &shadingMode);
2836    }
2837
2838    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
2839        uint8_t shadingStrength =
2840            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
2841        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
2842                sizeof(shadingStrength), &shadingStrength);
2843    }
2844
2845    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
2846        uint8_t facedetectMode =
2847            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
2848        rc = AddSetParmEntryToBatch(mParameters,
2849                CAM_INTF_META_STATS_FACEDETECT_MODE,
2850                sizeof(facedetectMode), &facedetectMode);
2851    }
2852
2853    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
2854        uint8_t histogramMode =
2855            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
2856        rc = AddSetParmEntryToBatch(mParameters,
2857                CAM_INTF_META_STATS_HISTOGRAM_MODE,
2858                sizeof(histogramMode), &histogramMode);
2859    }
2860
2861    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
2862        uint8_t sharpnessMapMode =
2863            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
2864        rc = AddSetParmEntryToBatch(mParameters,
2865                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
2866                sizeof(sharpnessMapMode), &sharpnessMapMode);
2867    }
2868
2869    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
2870        uint8_t tonemapMode =
2871            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
2872        rc = AddSetParmEntryToBatch(mParameters,
2873                CAM_INTF_META_TONEMAP_MODE,
2874                sizeof(tonemapMode), &tonemapMode);
2875    }
2876
2877    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2878        uint8_t captureIntent =
2879            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2880        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2881                sizeof(captureIntent), &captureIntent);
2882    }
2883
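    /* 3A (AE/AF/AWB) regions: convert the framework regions into HAL cam_area_t
     * ROIs; if a crop region was set in this request, resetIfNeededROI() decides
     * whether the ROI still needs to be sent for that crop. */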
2884    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
2885        cam_area_t roi;
2886        bool reset = true;
2887        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
2888        if (scalerCropSet) {
2889            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2890        }
2891        if (reset) {
2892            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
2893                    sizeof(roi), &roi);
2894        }
2895    }
2896
2897    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
2898        cam_area_t roi;
2899        bool reset = true;
2900        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
2901        if (scalerCropSet) {
2902            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2903        }
2904        if (reset) {
2905            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
2906                    sizeof(roi), &roi);
2907        }
2908    }
2909
2910    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
2911        cam_area_t roi;
2912        bool reset = true;
2913        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
2914        if (scalerCropSet) {
2915            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2916        }
2917        if (reset) {
2918            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
2919                    sizeof(roi), &roi);
2920        }
2921    }
2922    return rc;
2923}
2924
2925/*===========================================================================
2926 * FUNCTION   : getJpegSettings
2927 *
2928 * DESCRIPTION: parse the JPEG-related tags of a request and cache them in the HAL
2929 *
2930 *
2931 * PARAMETERS :
2932 *   @settings  : frame settings information from framework
2933 *
2934 *
2935 * RETURN     : success: NO_ERROR
2936 *              failure: NO_MEMORY if the settings buffer cannot be allocated
2937 *==========================================================================*/
2938int QCamera3HardwareInterface::getJpegSettings
2939                                  (const camera_metadata_t *settings)
2940{
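    /* Release any GPS allocations held from the previous request before
     * caching the new JPEG settings. */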
2941    if (mJpegSettings) {
2942        if (mJpegSettings->gps_timestamp) {
2943            free(mJpegSettings->gps_timestamp);
2944            mJpegSettings->gps_timestamp = NULL;
2945        }
2946        if (mJpegSettings->gps_coordinates) {
2947            for (int i = 0; i < 3; i++) {
2948                free(mJpegSettings->gps_coordinates[i]);
2949                mJpegSettings->gps_coordinates[i] = NULL;
2950            }
2951        }
2952        free(mJpegSettings);
2953        mJpegSettings = NULL;
2954    }
2955    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
    if (mJpegSettings == NULL) {
        ALOGE("%s: Failed to allocate jpeg_settings_t", __func__);
        return NO_MEMORY;
    }
2956    CameraMetadata jpeg_settings;
2957    jpeg_settings = settings;
2958
2959    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
2960        mJpegSettings->jpeg_orientation =
2961            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
2962    } else {
2963        mJpegSettings->jpeg_orientation = 0;
2964    }
2965    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
2966        mJpegSettings->jpeg_quality =
2967            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
2968    } else {
2969        mJpegSettings->jpeg_quality = 85;
2970    }
2971    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
2972        mJpegSettings->thumbnail_size.width =
2973            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
2974        mJpegSettings->thumbnail_size.height =
2975            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
2976    } else {
2977        mJpegSettings->thumbnail_size.width = 0;
2978        mJpegSettings->thumbnail_size.height = 0;
2979    }
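    /* GPS tags are optional: the values are heap-allocated so that a missing
     * tag can be represented as NULL downstream. */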
2980    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
2981        for (int i = 0; i < 3; i++) {
2982            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double));
2983            *(mJpegSettings->gps_coordinates[i]) =
2984                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
2985        }
2986    } else {
2987        for (int i = 0; i < 3; i++) {
2988            mJpegSettings->gps_coordinates[i] = NULL;
2989        }
2990    }
2991
2992    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
2993        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t));
2994        *(mJpegSettings->gps_timestamp) =
2995            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
2996    } else {
2997        mJpegSettings->gps_timestamp = NULL;
2998    }
2999
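    /* Copy the GPS processing method string and make sure it ends with a NUL;
     * the metadata entry itself is not guaranteed to include the terminator. */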
3000    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3001        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3002        for (int i = 0; i < len; i++) {
3003            mJpegSettings->gps_processing_method[i] =
3004                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3005        }
3006        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3007            mJpegSettings->gps_processing_method[len] = '\0';
3008        }
3009    } else {
3010        mJpegSettings->gps_processing_method[0] = '\0';
3011    }
3012
3013    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3014        mJpegSettings->sensor_sensitivity =
3015            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3016    } else {
3017        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3018    }
3019
3020    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3021        mJpegSettings->lens_focal_length =
3022            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3023    }
3024    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3025        mJpegSettings->exposure_compensation =
3026            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3027    }
3028    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3029    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3030    return 0;
3031}
3032
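/* The static functions below are glue between the Android framework /
 * mm-camera interface and this class: captureResultCb is the channel
 * callback, and the remaining functions are the camera3 entry points.
 * Each one validates its handle (device->priv or userdata) and forwards
 * the call to the member implementation. A minimal sketch of how the
 * entry points are wired into the camera3_device_ops table (the actual
 * table is defined elsewhere in this HAL; the layout shown is illustrative):
 *
 *   camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
 *       .initialize                         = initialize,
 *       .configure_streams                  = configure_streams,
 *       .register_stream_buffers            = register_stream_buffers,
 *       .construct_default_request_settings = construct_default_request_settings,
 *       .process_capture_request            = process_capture_request,
 *       .get_metadata_vendor_tag_ops        = get_metadata_vendor_tag_ops,
 *       .dump                               = dump,
 *   };
 *
 *   (close_camera_device is typically installed as hw_device_t.common.close.)
 */
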
3033/*===========================================================================
3034 * FUNCTION   : captureResultCb
3035 *
3036 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3037 *
3038 * PARAMETERS :
3039 *   @frame  : frame information from mm-camera-interface
3040 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3041 *   @userdata: userdata
3042 *
3043 * RETURN     : NONE
3044 *==========================================================================*/
3045void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3046                camera3_stream_buffer_t *buffer,
3047                uint32_t frame_number, void *userdata)
3048{
3049    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3050    if (hw == NULL) {
3051        ALOGE("%s: Invalid hw %p", __func__, hw);
3052        return;
3053    }
3054
3055    hw->captureResultCb(metadata, buffer, frame_number);
3056    return;
3057}
3058
3059/*===========================================================================
3060 * FUNCTION   : initialize
3061 *
3062 * DESCRIPTION: Pass framework callback pointers to HAL
3063 *
3064 * PARAMETERS :
3065 *   @device       : camera3 device handle
3066 *   @callback_ops : callback function pointers provided by the framework
3067 * RETURN     : Success : 0
3068 *              Failure: -ENODEV
3069 *==========================================================================*/
3070
3071int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3072                                  const camera3_callback_ops_t *callback_ops)
3073{
3074    ALOGV("%s: E", __func__);
3075    QCamera3HardwareInterface *hw =
3076        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3077    if (!hw) {
3078        ALOGE("%s: NULL camera device", __func__);
3079        return -ENODEV;
3080    }
3081
3082    int rc = hw->initialize(callback_ops);
3083    ALOGV("%s: X", __func__);
3084    return rc;
3085}
3086
3087/*===========================================================================
3088 * FUNCTION   : configure_streams
3089 *
3090 * DESCRIPTION: Validate the device handle and set up the streams requested by the framework
3091 *
3092 * PARAMETERS :
3093 *   @device      : camera3 device handle
3094 *   @stream_list : set of streams requested by the framework
3095 * RETURN     : Success: 0
3096 *              Failure: -EINVAL (if stream configuration is invalid)
3097 *                       -ENODEV (fatal error)
3098 *==========================================================================*/
3099
3100int QCamera3HardwareInterface::configure_streams(
3101        const struct camera3_device *device,
3102        camera3_stream_configuration_t *stream_list)
3103{
3104    ALOGV("%s: E", __func__);
3105    QCamera3HardwareInterface *hw =
3106        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3107    if (!hw) {
3108        ALOGE("%s: NULL camera device", __func__);
3109        return -ENODEV;
3110    }
3111    int rc = hw->configureStreams(stream_list);
3112    ALOGV("%s: X", __func__);
3113    return rc;
3114}
3115
3116/*===========================================================================
3117 * FUNCTION   : register_stream_buffers
3118 *
3119 * DESCRIPTION: Register stream buffers with the device
3120 *
3121 * PARAMETERS :
3122 *   @device : camera3 device handle, @buffer_set : buffers to register per stream
3123 * RETURN     : Success: 0 / Failure: -ENODEV
3124 *==========================================================================*/
3125int QCamera3HardwareInterface::register_stream_buffers(
3126        const struct camera3_device *device,
3127        const camera3_stream_buffer_set_t *buffer_set)
3128{
3129    ALOGV("%s: E", __func__);
3130    QCamera3HardwareInterface *hw =
3131        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3132    if (!hw) {
3133        ALOGE("%s: NULL camera device", __func__);
3134        return -ENODEV;
3135    }
3136    int rc = hw->registerStreamBuffers(buffer_set);
3137    ALOGV("%s: X", __func__);
3138    return rc;
3139}
3140
3141/*===========================================================================
3142 * FUNCTION   : construct_default_request_settings
3143 *
3144 * DESCRIPTION: Configure a settings buffer to meet the required use case
3145 *
3146 * PARAMETERS :
3147 *   @device : camera3 device handle
3148 *   @type   : request template type (CAMERA3_TEMPLATE_*)
3149 * RETURN     : Success: Return valid metadata
3150 *              Failure: Return NULL
3151 *==========================================================================*/
3152const camera_metadata_t* QCamera3HardwareInterface::
3153    construct_default_request_settings(const struct camera3_device *device,
3154                                        int type)
3155{
3156
3157    ALOGV("%s: E", __func__);
3158    camera_metadata_t* fwk_metadata = NULL;
3159    QCamera3HardwareInterface *hw =
3160        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3161    if (!hw) {
3162        ALOGE("%s: NULL camera device", __func__);
3163        return NULL;
3164    }
3165
3166    fwk_metadata = hw->translateCapabilityToMetadata(type);
3167
3168    ALOGV("%s: X", __func__);
3169    return fwk_metadata;
3170}
3171
3172/*===========================================================================
3173 * FUNCTION   : process_capture_request
3174 *
3175 * DESCRIPTION: Forward a single capture request from the framework to the HAL
3176 *
3177 * PARAMETERS :
3178 *   @device  : camera3 device handle
3179 *   @request : capture request with settings and output buffers
3180 * RETURN     : Success: 0 / Failure: -EINVAL
3181 *==========================================================================*/
3182int QCamera3HardwareInterface::process_capture_request(
3183                    const struct camera3_device *device,
3184                    camera3_capture_request_t *request)
3185{
3186    ALOGV("%s: E", __func__);
3187    QCamera3HardwareInterface *hw =
3188        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3189    if (!hw) {
3190        ALOGE("%s: NULL camera device", __func__);
3191        return -EINVAL;
3192    }
3193
3194    int rc = hw->processCaptureRequest(request);
3195    ALOGV("%s: X", __func__);
3196    return rc;
3197}
3198
3199/*===========================================================================
3200 * FUNCTION   : get_metadata_vendor_tag_ops
3201 *
3202 * DESCRIPTION: Provide the vendor tag query operations to the framework
3203 *
3204 * PARAMETERS :
3205 *   @device : camera3 device handle
3206 *   @ops    : vendor tag query ops table to be filled in
3207 * RETURN     : NONE
3208 *==========================================================================*/
3209
3210void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3211                const struct camera3_device *device,
3212                vendor_tag_query_ops_t* ops)
3213{
3214    ALOGV("%s: E", __func__);
3215    QCamera3HardwareInterface *hw =
3216        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3217    if (!hw) {
3218        ALOGE("%s: NULL camera device", __func__);
3219        return;
3220    }
3221
3222    hw->getMetadataVendorTagOps(ops);
3223    ALOGV("%s: X", __func__);
3224    return;
3225}
3226
3227/*===========================================================================
3228 * FUNCTION   : dump
3229 *
3230 * DESCRIPTION: Dump HAL debug state for this camera device to a file descriptor
3231 *
3232 * PARAMETERS :
3233 *   @device : camera3 device handle
3234 *   @fd     : file descriptor to write the dump to
3235 * RETURN     : NONE
3236 *==========================================================================*/
3237
3238void QCamera3HardwareInterface::dump(
3239                const struct camera3_device *device, int fd)
3240{
3241    ALOGV("%s: E", __func__);
3242    QCamera3HardwareInterface *hw =
3243        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3244    if (!hw) {
3245        ALOGE("%s: NULL camera device", __func__);
3246        return;
3247    }
3248
3249    hw->dump(fd);
3250    ALOGV("%s: X", __func__);
3251    return;
3252}
3253
3254/*===========================================================================
3255 * FUNCTION   : close_camera_device
3256 *
3257 * DESCRIPTION: Destroy the HAL instance and mark the camera session inactive
3258 *
3259 * PARAMETERS :
3260 *   @device : hw_device_t handle of the camera being closed
3261 *
3262 * RETURN     : Success: NO_ERROR / Failure: BAD_VALUE
3263 *==========================================================================*/
3264int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3265{
3266    ALOGV("%s: E", __func__);
3267    int ret = NO_ERROR;
3268    QCamera3HardwareInterface *hw =
3269        reinterpret_cast<QCamera3HardwareInterface *>(
3270            reinterpret_cast<camera3_device_t *>(device)->priv);
3271    if (!hw) {
3272        ALOGE("NULL camera device");
3273        return BAD_VALUE;
3274    }
3275    delete hw;
3276
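    /* Mark the camera session inactive under the static session lock so that a
     * subsequent camera open can proceed. */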
3277    pthread_mutex_lock(&mCameraSessionLock);
3278    mCameraSessionActive = 0;
3279    pthread_mutex_unlock(&mCameraSessionLock);
3280    ALOGV("%s: X", __func__);
3281    return ret;
3282}
3283
3284}; //end namespace qcamera
3285