QCamera3HWI.cpp revision 8bb5784949bef76a5880ca3c9a795c71b6ac680b
/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"

#include <cutils/properties.h>
#include <hardware/camera3.h>
#include <camera/CameraMetadata.h>
#include <stdlib.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <ui/Fence.h>
#include <gralloc_priv.h>
#include "QCamera3HWI.h"
#include "QCamera3Mem.h"
#include "QCamera3Channel.h"
#include "QCamera3PostProc.h"

using namespace android;

namespace qcamera {
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;

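/* Translation tables mapping framework (ANDROID_*) control enum values to
 * their vendor (CAM_*) equivalents. */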
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON   },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
};

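/* Supported JPEG thumbnail dimensions, listed as (width, height) pairs;
 * the trailing (0, 0) entry advertises that thumbnail generation can be
 * disabled (assumption based on the camera3 metadata convention). */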
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};
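/* The static entry points registered above are expected to recover the
 * QCamera3HardwareInterface instance from camera3_device_t::priv (set in the
 * constructor) and forward the call to the matching member function. */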


/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      m_pPowerModule(NULL)
{
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
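    // gCamCapability[cameraId] is assumed to have been populated (during the
    // earlier static capability query) before this constructor runs.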
    gCamCapability[cameraId]->version = CAM_HAL_V3;

    pthread_mutex_init(&mRequestLock, NULL);
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);
    pthread_mutex_init(&mCaptureResultLock, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
            channel->stop();
    }
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
            delete channel;
        free (*it);
    }

    if (mJpegSettings != NULL) {
        free(mJpegSettings);
        mJpegSettings = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        mMetadataChannel->stop();
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_mutex_destroy(&mRequestLock);
    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    pthread_mutex_destroy(&mCaptureResultLock);
    ALOGV("%s: X", __func__);
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    pthread_mutex_lock(&mCameraSessionLock);
    if (mCameraSessionActive) {
        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
        pthread_mutex_unlock(&mCameraSessionLock);
        return INVALID_OPERATION;
    }

    if (mCameraOpened) {
        *hw_device = NULL;
        pthread_mutex_unlock(&mCameraSessionLock);
        return PERMISSION_DENIED;
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
        mCameraSessionActive = 1;
    } else
        *hw_device = NULL;

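    // On a successful open, hint the power HAL that a camera workload is
    // starting (expressed as a video-encode hint).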
#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == 0) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=1");
            }
        }
    }
#endif
    pthread_mutex_unlock(&mCameraSessionLock);
    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }
    mCameraHandle = camera_open(mCameraId);
    if (!mCameraHandle) {
        ALOGE("camera_open failed.");
        return UNKNOWN_ERROR;
    }

    mCameraOpened = true;

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    int rc = NO_ERROR;

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;
    mCameraOpened = false;

#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == NO_ERROR) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=0");
            }
        }
    }
#endif

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParameters failed %d", __func__, rc);
        goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    return 0;

err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
 *              and output streams.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    pthread_mutex_lock(&mMutex);
    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
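    /* Stream bookkeeping: existing mStreamInfo entries are first marked
     * INVALID; streams that reappear in the new configuration are promoted to
     * RECONFIGURE, new streams are added as VALID, and whatever is still
     * INVALID afterwards gets torn down. */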
    /* first invalidate all the streams in the mStreamInfo list
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }

    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is already in the mStreamInfo list, validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    //mMetadataChannel->stop();

    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        if (newStream->priv == NULL) {
            //New stream, construct channel
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
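                    // A bidirectional stream paired with a BLOB (JPEG) stream
                    // is treated as ZSL; the channel is then sized to the JPEG
                    // dimensions.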
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    channel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = channel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework won't*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestsList and mPendingBuffersMap */
    mPendingRequestsList.clear();

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mFirstRequest = true;

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateCaptureRequest
 *
 * DESCRIPTION: validate a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateCaptureRequest(
                    camera3_capture_request_t *request)
{
    ssize_t idx = 0;
    const camera3_stream_buffer_t *b;
    CameraMetadata meta;

    /* Sanity check the request */
    if (request == NULL) {
        ALOGE("%s: NULL capture request", __func__);
        return BAD_VALUE;
    }

    uint32_t frameNumber = request->frame_number;
    if (request->input_buffer != NULL &&
            request->input_buffer->stream != mInputStream) {
        ALOGE("%s: Request %d: Input buffer not from input stream!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
        ALOGE("%s: Request %d: No output buffers provided!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->input_buffer != NULL) {
        b = request->input_buffer;
        QCamera3Channel *channel =
            static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
    }

    // Validate all buffers
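    // num_output_buffers >= 1 was verified above, so the do/while below
    // executes at least once safely.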
    b = request->output_buffers;
    do {
        QCamera3Channel *channel =
                static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        idx++;
        b = request->output_buffers + idx;
    } while (idx < (ssize_t)request->num_output_buffers);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : registerStreamBuffers
 *
 * DESCRIPTION: Register buffers for a given stream with the HAL device.
 *
 * PARAMETERS :
 *   @buffer_set : buffers to be registered for a stream
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t *buffer_set)
{
    int rc = 0;

    pthread_mutex_lock(&mMutex);

    if (buffer_set == NULL) {
        ALOGE("%s: Invalid buffer_set parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->stream == NULL) {
        ALOGE("%s: Invalid stream parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->num_buffers < 1) {
        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->buffers == NULL) {
        ALOGE("%s: Invalid buffers parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    camera3_stream_t *stream = buffer_set->stream;
    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;

    //set the buffer_set in the mStreamInfo array
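    //(the copy is kept so that streams marked RECONFIGURE in
    // configureStreams() can have these buffers re-registered with the new
    // channel)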
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->stream == stream) {
            uint32_t numBuffers = buffer_set->num_buffers;
            (*it)->buffer_set.stream = buffer_set->stream;
            (*it)->buffer_set.num_buffers = numBuffers;
            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
            if ((*it)->buffer_set.buffers == NULL) {
                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
                pthread_mutex_unlock(&mMutex);
                return -ENOMEM;
            }
            for (size_t j = 0; j < numBuffers; j++){
                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
            }
            (*it)->registered = 1;
        }
    }
    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
    if (rc < 0) {
        ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    pthread_mutex_unlock(&mMutex);
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : processCaptureRequest
 *
 * DESCRIPTION: process a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::processCaptureRequest(
                    camera3_capture_request_t *request)
{
    int rc = NO_ERROR;
    int32_t request_id;
    CameraMetadata meta;

    pthread_mutex_lock(&mMutex);

    rc = validateCaptureRequest(request);
    if (rc != NO_ERROR) {
        ALOGE("%s: incoming request is not valid", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    uint32_t frameNumber = request->frame_number;
    rc = setFrameParameters(request->frame_number, request->settings);
    if (rc < 0) {
        ALOGE("%s: fail to set frame parameters", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    meta = request->settings;
    if (meta.exists(ANDROID_REQUEST_ID)) {
        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
        mCurrentRequestId = request_id;
        ALOGV("%s: Received request with id: %d",__func__, request_id);
    } else if (mFirstRequest || mCurrentRequestId == -1){
        ALOGE("%s: Unable to find request id field, \
                & no previous id available", __func__);
        pthread_mutex_unlock(&mMutex);
        return NAME_NOT_FOUND;
    } else {
        ALOGV("%s: Re-using old request id", __func__);
        request_id = mCurrentRequestId;
    }

    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
                                    __func__, __LINE__,
                                    request->num_output_buffers,
                                    request->input_buffer,
                                    frameNumber);
    // Acquire all request buffers first
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        sp<Fence> acquireFence = new Fence(output.acquire_fence);

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
        //Call function to store local copy of jpeg data for encode params.
            rc = getJpegSettings(request->settings);
            if (rc < 0) {
                ALOGE("%s: failed to get jpeg parameters", __func__);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }

        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
        if (rc != OK) {
            ALOGE("%s: fence wait failed %d", __func__, rc);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
    }

    /* Update pending request list and pending buffers map */
    pthread_mutex_lock(&mRequestLock);
    PendingRequestInfo pendingRequest;
    pendingRequest.frame_number = frameNumber;
    pendingRequest.num_buffers = request->num_output_buffers;
    pendingRequest.request_id = request_id;

    for (size_t i = 0; i < request->num_output_buffers; i++) {
        RequestedBufferInfo requestedBuf;
        requestedBuf.stream = request->output_buffers[i].stream;
        requestedBuf.buffer = NULL;
        pendingRequest.buffers.push_back(requestedBuf);

        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
    }
    mPendingRequestsList.push_back(pendingRequest);
    pthread_mutex_unlock(&mRequestLock);

    // Notify metadata channel we receive a request
    mMetadataChannel->request(NULL, frameNumber);

    // Call request on other streams
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
        mm_camera_buf_def_t *pInputBuffer = NULL;

        if (channel == NULL) {
            ALOGE("%s: invalid channel pointer for stream", __func__);
            continue;
        }

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            QCamera3RegularChannel* inputChannel = NULL;
            if(request->input_buffer != NULL){

                //Try to get the internal format
                inputChannel = (QCamera3RegularChannel*)
                    request->input_buffer->stream->priv;
                if(inputChannel == NULL ){
                    ALOGE("%s: failed to get input channel handle", __func__);
                } else {
                    pInputBuffer =
                        inputChannel->getInternalFormatBuffer(
                                request->input_buffer->buffer);
                    ALOGD("%s: Input buffer dump",__func__);
                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
                    ALOGD("frame len:%d", pInputBuffer->frame_len);
                }
            }
            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
                            pInputBuffer,(QCamera3Channel*)inputChannel);
        } else {
            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
                __LINE__, output.buffer, frameNumber);
            rc = channel->request(output.buffer, frameNumber);
        }
        if (rc < 0)
            ALOGE("%s: request failed", __func__);
    }

    mFirstRequest = false;

    //Block on conditional variable
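    // captureResultCb() clears mPendingRequest and signals mRequestCond once
    // the result is handled and no stream is holding its full max_buffers
    // quota.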
    pthread_mutex_lock(&mRequestLock);
    mPendingRequest = 1;
    while (mPendingRequest == 1) {
        pthread_cond_wait(&mRequestCond, &mRequestLock);
    }
    pthread_mutex_unlock(&mRequestLock);

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : getMetadataVendorTagOps
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::getMetadataVendorTagOps(
                    vendor_tag_query_ops_t* /*ops*/)
{
    /* Enable locks when we eventually add Vendor Tags */
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::dump(int /*fd*/)
{
    /*Enable lock when we implement this function*/
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : captureResultCb
 *
 * DESCRIPTION: Callback handler for all capture results
 *              (streams, as well as metadata)
 *
 * PARAMETERS :
 *   @metadata : metadata information
 *   @buffer   : actual gralloc buffer to be returned to frameworks.
 *               NULL if metadata.
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    pthread_mutex_lock(&mRequestLock);

    if (metadata_buf) {
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;

        if (!frame_number_valid) {
            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
            mMetadataChannel->bufDone(metadata_buf);
            goto done_metadata;
        }
        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
                frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is metadata drop, how do we derive the timestamp of
            //dropped frames? For now, we fake the dropped timestamp by subtracting
            //from the reported timestamp
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
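            // NSEC_PER_33MSEC is used as an approximation of one frame
            // interval (~30 fps) when back-filling timestamps for dropped
            // frames.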

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time, i->request_id);
                // Return metadata buffer
                mMetadataChannel->bufDone(metadata_buf);
            }
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                if (!result_buffers) {
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }


done_metadata:
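        // Only unblock processCaptureRequest() while no stream has all of its
        // max_buffers outstanding.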
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
            i++;
        }
        if (i == mPendingRequestsList.end()) {
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            ALOGV("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                                __func__, buffer, frame_number);
                    }
                }
            }
        }
    }

    pthread_mutex_unlock(&mRequestLock);
    return;
}

/*===========================================================================
 * FUNCTION   : translateCbMetadataToResultMetadata
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

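    /* POINTER_OF() is assumed to resolve a CAM_INTF_* tag to the address of
     * its payload inside the vendor metadata_buffer_t. */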
    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
    if(mIsZslMode) {
        uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
    } else {
        uint8_t *ae_state =
            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
        camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
    }
    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

1362    int64_t  *sensorFrameDuration =
1363        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1364    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
1365
1366    int32_t  *sensorSensitivity =
1367        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1368    mMetadataResponse.iso_speed = *sensorSensitivity;
1369    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1370
1371    uint8_t  *shadingMode =
1372        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1373    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1374
1375    uint8_t  *faceDetectMode =
1376        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1377    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
1378
1379    uint8_t  *histogramMode =
1380        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1381    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1382
1383    uint8_t  *sharpnessMapMode =
1384        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1385    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1386            sharpnessMapMode, 1);
1387
1388    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1389    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1390        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1391    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1392            (int32_t*)sharpnessMap->sharpness,
1393            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1394
1395    resultMetadata = camMetadata.release();
1396    return resultMetadata;
1397}
1398
1399/*===========================================================================
1400 * FUNCTION   : convertToRegions
1401 *
1402 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1403 *
1404 * PARAMETERS :
1405 *   @rect   : cam_rect_t struct to convert
1406 *   @region : int32_t destination array
1407 *   @weight : if we are converting from cam_area_t, weight is valid
1408 *             else weight = -1
1409 *
1410 *==========================================================================*/
1411void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1412    region[0] = rect.left;
1413    region[1] = rect.top;
1414    region[2] = rect.left + rect.width;
1415    region[3] = rect.top + rect.height;
1416    if (weight > -1) {
1417        region[4] = weight;
1418    }
1419}
1420
1421/*===========================================================================
1422 * FUNCTION   : convertFromRegions
1423 *
1424 * DESCRIPTION: helper method to convert a [xmin, ymin, xmax, ymax, weight]
1425 *              region array from the frame settings into cam_area_t
1426 *
1427 * PARAMETERS :
1428 *   @roi      : cam_area_t destination struct
1429 *   @settings : frame settings information from framework
1430 *   @tag      : metadata tag of the region to convert
1431 *
1432 *==========================================================================*/
1433void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1434                                                   const camera_metadata_t *settings,
1435                                                   uint32_t tag){
1436    CameraMetadata frame_settings;
1437    frame_settings = settings;
1438    int32_t x_min = frame_settings.find(tag).data.i32[0];
1439    int32_t y_min = frame_settings.find(tag).data.i32[1];
1440    int32_t x_max = frame_settings.find(tag).data.i32[2];
1441    int32_t y_max = frame_settings.find(tag).data.i32[3];
1442    roi->weight = frame_settings.find(tag).data.i32[4];
1443    roi->rect.left = x_min;
1444    roi->rect.top = y_min;
1445    roi->rect.width = x_max - x_min;
1446    roi->rect.height = y_max - y_min;
1447}
1448
1449/*===========================================================================
1450 * FUNCTION   : resetIfNeededROI
1451 *
1452 * DESCRIPTION: helper method to clamp the roi to the scaler crop region,
1453 *              or reject it if it lies completely outside the crop region
1454 *
1455 * PARAMETERS :
1456 *   @roi       : cam_area_t struct to resize
1457 *   @scalerCropRegion : cam_crop_region_t region to compare against
1458 * RETURN     : true  -- roi overlaps the crop region (clamped if needed)
1459 *              false -- roi lies completely outside the crop region
1460 *==========================================================================*/
1461bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1462                                                 const cam_crop_region_t* scalerCropRegion)
1463{
1464    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1465    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1466    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
1467    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
1468    if ((roi_x_max < scalerCropRegion->left) ||
1469        (roi_y_max < scalerCropRegion->top)  ||
1470        (roi->rect.left > crop_x_max) ||
1471        (roi->rect.top > crop_y_max)){
1472        return false;
1473    }
1474    if (roi->rect.left < scalerCropRegion->left) {
1475        roi->rect.left = scalerCropRegion->left;
1476    }
1477    if (roi->rect.top < scalerCropRegion->top) {
1478        roi->rect.top = scalerCropRegion->top;
1479    }
1480    if (roi_x_max > crop_x_max) {
1481        roi_x_max = crop_x_max;
1482    }
1483    if (roi_y_max > crop_y_max) {
1484        roi_y_max = crop_y_max;
1485    }
1486    roi->rect.width = roi_x_max - roi->rect.left;
1487    roi->rect.height = roi_y_max - roi->rect.top;
1488    return true;
1489}
1490
1491/*===========================================================================
1492 * FUNCTION   : convertLandmarks
1493 *
1494 * DESCRIPTION: helper method to extract the landmarks from face detection info
1495 *
1496 * PARAMETERS :
1497 *   @face      : cam_face_detection_info_t of the detected face
1498 *   @landmarks : int32_t destination array
1499 *
1500 *
1501 *==========================================================================*/
1502void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1503{
1504    landmarks[0] = face.left_eye_center.x;
1505    landmarks[1] = face.left_eye_center.y;
1506    landmarks[2] = face.right_eye_center.x;
1507    landmarks[3] = face.right_eye_center.y;
1508    landmarks[4] = face.mouth_center.x;
1509    landmarks[5] = face.mouth_center.y;
1510}
1511
1512#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1513/*===========================================================================
1514 * FUNCTION   : initCapabilities
1515 *
1516 * DESCRIPTION: initialize camera capabilities in static data struct
1517 *
1518 * PARAMETERS :
1519 *   @cameraId  : camera Id
1520 *
1521 * RETURN     : int32_t type of status
1522 *              NO_ERROR  -- success
1523 *              non-zero failure code
1524 *==========================================================================*/
1525int QCamera3HardwareInterface::initCapabilities(int cameraId)
1526{
1527    int rc = 0;
1528    mm_camera_vtbl_t *cameraHandle = NULL;
1529    QCamera3HeapMemory *capabilityHeap = NULL;
1530
1531    cameraHandle = camera_open(cameraId);
1532    if (!cameraHandle) {
1533        ALOGE("%s: camera_open failed", __func__);
1534        rc = -1;
1535        goto open_failed;
1536    }
1537
1538    capabilityHeap = new QCamera3HeapMemory();
1539    if (capabilityHeap == NULL) {
1540        ALOGE("%s: creation of capabilityHeap failed", __func__);
1541        goto heap_creation_failed;
1542    }
1543    /* Allocate memory for capability buffer */
1544    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1545    if(rc != OK) {
1546        ALOGE("%s: No memory for cappability", __func__);
1547        goto allocate_failed;
1548    }
1549
1550    /* Map memory for capability buffer */
1551    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1552    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1553                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1554                                capabilityHeap->getFd(0),
1555                                sizeof(cam_capability_t));
1556    if(rc < 0) {
1557        ALOGE("%s: failed to map capability buffer", __func__);
1558        goto map_failed;
1559    }
1560
1561    /* Query Capability */
1562    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1563    if(rc < 0) {
1564        ALOGE("%s: failed to query capability",__func__);
1565        goto query_failed;
1566    }
1567    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1568    if (!gCamCapability[cameraId]) {
1569        ALOGE("%s: out of memory", __func__);
1570        goto query_failed;
1571    }
1572    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1573                                        sizeof(cam_capability_t));
1574    rc = 0;
1575
1576query_failed:
1577    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1578                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1579map_failed:
1580    capabilityHeap->deallocate();
1581allocate_failed:
1582    delete capabilityHeap;
1583heap_creation_failed:
1584    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1585    cameraHandle = NULL;
1586open_failed:
1587    return rc;
1588}
1589
1590/*===========================================================================
1591 * FUNCTION   : initParameters
1592 *
1593 * DESCRIPTION: initialize camera parameters
1594 *
1595 * PARAMETERS :
1596 *
1597 * RETURN     : int32_t type of status
1598 *              NO_ERROR  -- success
1599 *              non-zero failure code
1600 *==========================================================================*/
1601int QCamera3HardwareInterface::initParameters()
1602{
1603    int rc = 0;
1604
1605    //Allocate Set Param Buffer
1606    mParamHeap = new QCamera3HeapMemory();
1607    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1608    if(rc != OK) {
1609        rc = NO_MEMORY;
1610        ALOGE("Failed to allocate SETPARM Heap memory");
1611        delete mParamHeap;
1612        mParamHeap = NULL;
1613        return rc;
1614    }
1615
1616    //Map memory for parameters buffer
1617    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1618            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1619            mParamHeap->getFd(0),
1620            sizeof(parm_buffer_t));
1621    if(rc < 0) {
1622        ALOGE("%s:failed to map SETPARM buffer",__func__);
1623        rc = FAILED_TRANSACTION;
1624        mParamHeap->deallocate();
1625        delete mParamHeap;
1626        mParamHeap = NULL;
1627        return rc;
1628    }
1629
1630    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1631    return rc;
1632}
1633
1634/*===========================================================================
1635 * FUNCTION   : deinitParameters
1636 *
1637 * DESCRIPTION: de-initialize camera parameters
1638 *
1639 * PARAMETERS :
1640 *
1641 * RETURN     : NONE
1642 *==========================================================================*/
1643void QCamera3HardwareInterface::deinitParameters()
1644{
1645    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1646            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1647
1648    mParamHeap->deallocate();
1649    delete mParamHeap;
1650    mParamHeap = NULL;
1651
1652    mParameters = NULL;
1653}
1654
1655/*===========================================================================
1656 * FUNCTION   : calcMaxJpegSize
1657 *
1658 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1659 *
1660 * PARAMETERS :
1661 *
1662 * RETURN     : max_jpeg_size
1663 *==========================================================================*/
1664int QCamera3HardwareInterface::calcMaxJpegSize()
1665{
1666    int32_t max_jpeg_size = 0;
1667    int temp_width, temp_height;
1668    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1669        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1670        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1671        if (temp_width * temp_height > max_jpeg_size ) {
1672            max_jpeg_size = temp_width * temp_height;
1673        }
1674    }
1675    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1676    return max_jpeg_size;
1677}
1678
1679/*===========================================================================
1680 * FUNCTION   : initStaticMetadata
1681 *
1682 * DESCRIPTION: initialize the static metadata
1683 *
1684 * PARAMETERS :
1685 *   @cameraId  : camera Id
1686 *
1687 * RETURN     : int32_t type of status
1688 *              0  -- success
1689 *              non-zero failure code
1690 *==========================================================================*/
1691int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1692{
1693    int rc = 0;
1694    CameraMetadata staticInfo;
1695
1696    /* android.info: hardware level */
1697    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
1698    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1699        &supportedHardwareLevel, 1);
1700
1701    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1702    /*HAL 3 only*/
1703    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1704                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1705
1706    /*hard coded for now but this should come from sensor*/
1707    float min_focus_distance;
1708    if(facingBack){
1709        min_focus_distance = 10;
1710    } else {
1711        min_focus_distance = 0;
1712    }
1713    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1714                    &min_focus_distance, 1);
1715
1716    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1717                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1718
1719    /*should be using focal lengths but sensor doesn't provide that info now*/
1720    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1721                      &gCamCapability[cameraId]->focal_length,
1722                      1);
1723
1724    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1725                      gCamCapability[cameraId]->apertures,
1726                      gCamCapability[cameraId]->apertures_count);
1727
1728    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1729                gCamCapability[cameraId]->filter_densities,
1730                gCamCapability[cameraId]->filter_densities_count);
1731
1732
1733    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1734                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1735                      gCamCapability[cameraId]->optical_stab_modes_count);
1736
1737    staticInfo.update(ANDROID_LENS_POSITION,
1738                      gCamCapability[cameraId]->lens_position,
1739                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1740
1741    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1742                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1743    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1744                      lens_shading_map_size,
1745                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1746
1747    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1748                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1749    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1750            geo_correction_map_size,
1751            sizeof(geo_correction_map_size)/sizeof(int32_t));
1752
1753    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1754                       gCamCapability[cameraId]->geo_correction_map,
1755                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1756
1757    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1758            gCamCapability[cameraId]->sensor_physical_size, 2);
1759
1760    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1761            gCamCapability[cameraId]->exposure_time_range, 2);
1762
1763    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1764            &gCamCapability[cameraId]->max_frame_duration, 1);
1765
1766
1767    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1768                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1769
1770    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1771                                               gCamCapability[cameraId]->pixel_array_size.height};
1772    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1773                      pixel_array_size, 2);
1774
1775    int32_t active_array_size[] = {0, 0,
1776                                                gCamCapability[cameraId]->active_array_size.width,
1777                                                gCamCapability[cameraId]->active_array_size.height};
1778    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1779                      active_array_size, 4);
1780
1781    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1782            &gCamCapability[cameraId]->white_level, 1);
1783
1784    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1785            gCamCapability[cameraId]->black_level_pattern, 4);
1786
1787    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1788                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1789
1790    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1791                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1792
1793    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1794                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1795    /*hardcode 0 for now*/
1796    int32_t max_face_count = 0;
1797    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1798                      &max_face_count, 1);
1799
1800    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1801                      &gCamCapability[cameraId]->histogram_size, 1);
1802
1803    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1804            &gCamCapability[cameraId]->max_histogram_count, 1);
1805
1806    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1807                                                gCamCapability[cameraId]->sharpness_map_size.height};
1808
1809    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1810            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1811
1812    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1813            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1814
1815
1816    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1817                      &gCamCapability[cameraId]->raw_min_duration,
1818                       1);
1819
1820    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
1821                                                HAL_PIXEL_FORMAT_BLOB};
1822    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
1823    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1824                      scalar_formats,
1825                      scalar_formats_count);
1826
1827    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1828    makeTable(gCamCapability[cameraId]->supported_sizes_tbl,
1829              gCamCapability[cameraId]->supported_sizes_tbl_cnt,
1830              available_processed_sizes);
1831    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1832                available_processed_sizes,
1833                (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2);
1834
1835    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1836    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1837                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1838                 available_fps_ranges);
1839    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1840            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1841
1842    camera_metadata_rational exposureCompensationStep = {
1843            gCamCapability[cameraId]->exp_compensation_step.numerator,
1844            gCamCapability[cameraId]->exp_compensation_step.denominator};
1845    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1846                      &exposureCompensationStep, 1);
1847
1848    /*TO DO*/
1849    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1850    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1851                      availableVstabModes, sizeof(availableVstabModes));
1852
1853    /*HAL 1 and HAL 3 common*/
1854    float maxZoom = 4;
1855    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1856            &maxZoom, 1);
1857
1858    int32_t max3aRegions = 1;
1859    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1860            &max3aRegions, 1);
1861
1862    uint8_t availableFaceDetectModes[] = {
1863            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1864    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1865                      availableFaceDetectModes,
1866                      sizeof(availableFaceDetectModes));
1867
1868    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1869                                       gCamCapability[cameraId]->raw_dim.height};
1870    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1871                      raw_size,
1872                      sizeof(raw_size)/sizeof(int32_t));
1873
1874    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1875                                                        gCamCapability[cameraId]->exposure_compensation_max};
1876    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1877            exposureCompensationRange,
1878            sizeof(exposureCompensationRange)/sizeof(int32_t));
1879
1880    uint8_t lensFacing = (facingBack) ?
1881            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1882    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1883
1884    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
1885    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1886              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1887              available_jpeg_sizes);
1888    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1889                available_jpeg_sizes,
1890                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1891
1892    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1893                      available_thumbnail_sizes,
1894                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1895
1896    int32_t max_jpeg_size = 0;
1897    int temp_width, temp_height;
1898    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1899        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1900        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1901        if (temp_width * temp_height > max_jpeg_size ) {
1902            max_jpeg_size = temp_width * temp_height;
1903        }
1904    }
1905    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1906    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1907                      &max_jpeg_size, 1);
1908
1909    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1910    int32_t size = 0;
1911    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1912        int val = lookupFwkName(EFFECT_MODES_MAP,
1913                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1914                                   gCamCapability[cameraId]->supported_effects[i]);
1915        if (val != NAME_NOT_FOUND) {
1916            avail_effects[size] = (uint8_t)val;
1917            size++;
1918        }
1919    }
1920    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1921                      avail_effects,
1922                      size);
1923
1924    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1925    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1926    int32_t supported_scene_modes_cnt = 0;
1927    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1928        int val = lookupFwkName(SCENE_MODES_MAP,
1929                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1930                                gCamCapability[cameraId]->supported_scene_modes[i]);
1931        if (val != NAME_NOT_FOUND) {
1932            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1933            supported_indexes[supported_scene_modes_cnt] = i;
1934            supported_scene_modes_cnt++;
1935        }
1936    }
1937
1938    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1939                      avail_scene_modes,
1940                      supported_scene_modes_cnt);
1941
1942    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1943    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1944                      supported_scene_modes_cnt,
1945                      scene_mode_overrides,
1946                      supported_indexes,
1947                      cameraId);
1948    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
1949                      scene_mode_overrides,
1950                      supported_scene_modes_cnt*3);
1951
1952    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
1953    size = 0;
1954    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
1955        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
1956                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
1957                                 gCamCapability[cameraId]->supported_antibandings[i]);
1958        if (val != NAME_NOT_FOUND) {
1959            avail_antibanding_modes[size] = (uint8_t)val;
1960            size++;
1961        }
1962
1963    }
1964    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1965                      avail_antibanding_modes,
1966                      size);
1967
1968    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
1969    size = 0;
1970    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
1971        int val = lookupFwkName(FOCUS_MODES_MAP,
1972                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
1973                                gCamCapability[cameraId]->supported_focus_modes[i]);
1974        if (val != NAME_NOT_FOUND) {
1975            avail_af_modes[size] = (uint8_t)val;
1976            size++;
1977        }
1978    }
1979    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1980                      avail_af_modes,
1981                      size);
1982
1983    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
1984    size = 0;
1985    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
1986        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1987                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1988                                    gCamCapability[cameraId]->supported_white_balances[i]);
1989        if (val != NAME_NOT_FOUND) {
1990            avail_awb_modes[size] = (uint8_t)val;
1991            size++;
1992        }
1993    }
1994    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1995                      avail_awb_modes,
1996                      size);
1997
1998    uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX];
1999    size = 0;
2000    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) {
2001        int val = lookupFwkName(FLASH_MODES_MAP,
2002                                sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
2003                                gCamCapability[cameraId]->supported_flash_modes[i]);
2004        if (val != NAME_NOT_FOUND) {
2005            avail_flash_modes[size] = (uint8_t)val;
2006            size++;
2007        }
2008    }
2009    uint8_t flashAvailable = 0;
2010    if (size > 1) {
2011        //flash is supported
2012        flashAvailable = 1;
2013    }
2014    staticInfo.update(ANDROID_FLASH_MODE,
2015                      avail_flash_modes,
2016                      size);
2017
2018    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2019            &flashAvailable, 1);
2020
2021    uint8_t avail_ae_modes[5];
2022    size = 0;
2023    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2024        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2025        size++;
2026    }
2027    if (flashAvailable) {
2028        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2029        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2030        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2031    }
2032    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2033                      avail_ae_modes,
2034                      size);
2035
2036    int32_t min = INT_MAX, max = INT_MIN;
2037    for (int i = 0; i < gCamCapability[cameraId]->supported_iso_modes_cnt; i++) {
2038        int32_t sensitivity = getSensorSensitivity(gCamCapability[cameraId]->supported_iso_modes[i]);
2039        if (sensitivity != -1) {
2040            min = (sensitivity >= min) ? min : sensitivity;
2041            max = (sensitivity <= max) ? max : sensitivity;
2042        }
2043    }
2044    int32_t sensitivity_range[] = {min, max};
2045    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2046                      sensitivity_range,
2047                      sizeof(sensitivity_range) / sizeof(int32_t));
2048
2049    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2050                      &gCamCapability[cameraId]->max_analog_sensitivity,
2051                      1);
2052    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2053                      &gCamCapability[cameraId]->processed_min_duration,
2054                      1);
2055    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2056                      &gCamCapability[cameraId]->jpeg_min_duration,
2057                      1);
2058
2059    gStaticMetadata[cameraId] = staticInfo.release();
2060    return rc;
2061}
2062
2063/*===========================================================================
2064 * FUNCTION   : makeTable
2065 *
2066 * DESCRIPTION: make a table of sizes
2067 *
2068 * PARAMETERS : @dimTable  : dimension table from the capability struct
2069 *              @size      : number of entries in dimTable
2070 *              @sizeTable : destination array of width/height pairs
2071 *==========================================================================*/
2072void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2073                                          int32_t* sizeTable)
2074{
2075    int j = 0;
2076    for (int i = 0; i < size; i++) {
2077        sizeTable[j] = dimTable[i].width;
2078        sizeTable[j+1] = dimTable[i].height;
2079        j+=2;
2080    }
2081}
2082
2083/*===========================================================================
2084 * FUNCTION   : makeFPSTable
2085 *
2086 * DESCRIPTION: make a table of fps ranges
2087 *
2088 * PARAMETERS : @fpsTable : fps range table, @size : number of entries,
2089 *              @fpsRangesTable : destination array of (min, max) pairs
2090 *==========================================================================*/
2091void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2092                                          int32_t* fpsRangesTable)
2093{
2094    int j = 0;
2095    for (int i = 0; i < size; i++) {
2096        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2097        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2098        j+=2;
2099    }
2100}
2101
2102/*===========================================================================
2103 * FUNCTION   : makeOverridesList
2104 *
2105 * DESCRIPTION: make a list of scene mode overrides
2106 *
2107 * PARAMETERS : @overridesTable : scene mode overrides from the daemon
2108 *              @size : override count, @overridesList : destination list
2109 *              @supported_indexes : backend indexes, @camera_id : camera Id
2110 *==========================================================================*/
2111void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2112                                                  uint8_t size, uint8_t* overridesList,
2113                                                  uint8_t* supported_indexes,
2114                                                  int camera_id)
2115{
2116    /*daemon will give a list of overrides for all scene modes.
2117      However we should send the fwk only the overrides for the scene modes
2118      supported by the framework*/
2119    int j = 0, index = 0, supt = 0;
2120    uint8_t focus_override;
2121    for (int i = 0; i < size; i++) {
2122        supt = 0;
2123        index = supported_indexes[i];
2124        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
2125        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2126                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2127                                                    overridesTable[index].awb_mode);
2128        focus_override = (uint8_t)overridesTable[index].af_mode;
2129        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2130           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2131              supt = 1;
2132              break;
2133           }
2134        }
2135        if (supt) {
2136           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2137                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2138                                              focus_override);
2139        } else {
2140           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2141        }
2142        j+=3;
2143    }
2144}
2145
2146/*===========================================================================
2147 * FUNCTION   : getScalarFormat
2148 *
2149 * DESCRIPTION: convert the backend format to a type recognized by the framework
2150 *
2151 * PARAMETERS : @format : the format from the backend
2152 *
2153 * RETURN     : format recognized by the framework
2154 *
2155 *==========================================================================*/
2156int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2157{
2158    int32_t halPixelFormat;
2159
2160    switch (format) {
2161    case CAM_FORMAT_YUV_420_NV12:
2162        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2163        break;
2164    case CAM_FORMAT_YUV_420_NV21:
2165        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2166        break;
2167    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2168        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2169        break;
2170    case CAM_FORMAT_YUV_420_YV12:
2171        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2172        break;
2173    case CAM_FORMAT_YUV_422_NV16:
2174    case CAM_FORMAT_YUV_422_NV61:
2175    default:
2176        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2177        break;
2178    }
2179    return halPixelFormat;
2180}
2181
2182/*===========================================================================
2183 * FUNCTION   : getSensorSensitivity
2184 *
2185 * DESCRIPTION: convert iso_mode to an integer value
2186 *
2187 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2188 *
2189 * RETURN     : sensitivity supported by sensor
2190 *
2191 *==========================================================================*/
2192int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2193{
2194    int32_t sensitivity;
2195
2196    switch (iso_mode) {
2197    case CAM_ISO_MODE_100:
2198        sensitivity = 100;
2199        break;
2200    case CAM_ISO_MODE_200:
2201        sensitivity = 200;
2202        break;
2203    case CAM_ISO_MODE_400:
2204        sensitivity = 400;
2205        break;
2206    case CAM_ISO_MODE_800:
2207        sensitivity = 800;
2208        break;
2209    case CAM_ISO_MODE_1600:
2210        sensitivity = 1600;
2211        break;
2212    default:
2213        sensitivity = -1;
2214        break;
2215    }
2216    return sensitivity;
2217}
2218
2219
2220/*===========================================================================
2221 * FUNCTION   : AddSetParmEntryToBatch
2222 *
2223 * DESCRIPTION: add set parameter entry into batch
2224 *
2225 * PARAMETERS :
2226 *   @p_table     : ptr to parameter buffer
2227 *   @paramType   : parameter type
2228 *   @paramLength : length of parameter value
2229 *   @paramValue  : ptr to parameter value
2230 *
2231 * RETURN     : int32_t type of status
2232 *              NO_ERROR  -- success
2233 *              non-zero failure code
2234 *==========================================================================*/
2235int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2236                                                          cam_intf_parm_type_t paramType,
2237                                                          uint32_t paramLength,
2238                                                          void *paramValue)
2239{
2240    int position = paramType;
2241    int current, next;
2242
2243    /*************************************************************************
2244    *                 Code to take care of linking next flags                *
2245    *************************************************************************/
2246    current = GET_FIRST_PARAM_ID(p_table);
2247    if (position == current){
2248        //DO NOTHING
2249    } else if (position < current){
2250        SET_NEXT_PARAM_ID(position, p_table, current);
2251        SET_FIRST_PARAM_ID(p_table, position);
2252    } else {
2253        /* Search for the position in the linked list where we need to slot in*/
2254        while (position > GET_NEXT_PARAM_ID(current, p_table))
2255            current = GET_NEXT_PARAM_ID(current, p_table);
2256
2257        /*If node already exists no need to alter linking*/
2258        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2259            next = GET_NEXT_PARAM_ID(current, p_table);
2260            SET_NEXT_PARAM_ID(current, p_table, position);
2261            SET_NEXT_PARAM_ID(position, p_table, next);
2262        }
2263    }
2264
2265    /*************************************************************************
2266    *                   Copy contents into entry                             *
2267    *************************************************************************/
2268
2269    if (paramLength > sizeof(parm_type_t)) {
2270        ALOGE("%s:Size of input larger than max entry size",__func__);
2271        return BAD_VALUE;
2272    }
2273    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2274    return NO_ERROR;
2275}
2276
2277/*===========================================================================
2278 * FUNCTION   : lookupFwkName
2279 *
2280 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
2281 *              make sure the parameter is correctly propagated
2282 *
2283 * PARAMETERS  :
2284 *   @arr      : map between the two enums
2285 *   @len      : len of the map
2286 *   @hal_name : name of the hal_parm to map
2287 *
2288 * RETURN     : int8_t value
2289 *              fwk_name       -- matching framework enum found
2290 *              NAME_NOT_FOUND -- no matching framework enum
2291 *==========================================================================*/
2292int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2293                                             int len, int hal_name)
2294{
2295
2296    for (int i = 0; i < len; i++) {
2297        if (arr[i].hal_name == hal_name)
2298            return arr[i].fwk_name;
2299    }
2300
2301    /* Not able to find matching framework type is not necessarily
2302     * an error case. This happens when mm-camera supports more attributes
2303     * than the frameworks do */
2304    ALOGD("%s: Cannot find matching framework type", __func__);
2305    return NAME_NOT_FOUND;
2306}
2307
2308/*===========================================================================
2309 * FUNCTION   : lookupHalName
2310 *
2311 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
2312 *              make sure the parameter is correctly propagated
2313 *
2314 * PARAMETERS  :
2315 *   @arr      : map between the two enums
2316 *   @len      : len of the map
2317 *   @fwk_name : framework enum name to map to its HAL equivalent
2318 *
2319 * RETURN     : int8_t value
2320 *              hal_name       -- matching HAL enum found
2321 *              NAME_NOT_FOUND -- no matching HAL enum
2322 *==========================================================================*/
2323int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2324                                             int len, int fwk_name)
2325{
2326    for (int i = 0; i < len; i++) {
2327       if (arr[i].fwk_name == fwk_name)
2328           return arr[i].hal_name;
2329    }
2330    ALOGE("%s: Cannot find matching hal type", __func__);
2331    return NAME_NOT_FOUND;
2332}
2333
2334/*===========================================================================
2335 * FUNCTION   : getCamInfo
2336 *
2337 * DESCRIPTION: query camera capabilities and fill in the camera_info struct
2338 *
2339 * PARAMETERS :
2340 *   @cameraId  : camera Id
2341 *   @info      : camera info struct to be filled in with camera capabilities
2342 *
2343 * RETURN     : int32_t type of status
2344 *              NO_ERROR  -- success
2345 *              non-zero failure code
2346 *==========================================================================*/
2347int QCamera3HardwareInterface::getCamInfo(int cameraId,
2348                                    struct camera_info *info)
2349{
2350    int rc = 0;
2351
2352    if (NULL == gCamCapability[cameraId]) {
2353        rc = initCapabilities(cameraId);
2354        if (rc < 0) {
2355            //pthread_mutex_unlock(&g_camlock);
2356            return rc;
2357        }
2358    }
2359
2360    if (NULL == gStaticMetadata[cameraId]) {
2361        rc = initStaticMetadata(cameraId);
2362        if (rc < 0) {
2363            return rc;
2364        }
2365    }
2366
2367    switch(gCamCapability[cameraId]->position) {
2368    case CAM_POSITION_BACK:
2369        info->facing = CAMERA_FACING_BACK;
2370        break;
2371
2372    case CAM_POSITION_FRONT:
2373        info->facing = CAMERA_FACING_FRONT;
2374        break;
2375
2376    default:
2377        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2378        rc = -1;
2379        break;
2380    }
2381
2382
2383    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2384    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2385    info->static_camera_characteristics = gStaticMetadata[cameraId];
2386
2387    return rc;
2388}
2389
2390/*===========================================================================
2391 * FUNCTION   : translateCapabilityToMetadata
2392 *
2393 * DESCRIPTION: construct default request settings for the given template
2394 *
2395 * PARAMETERS : @type : capture template type (CAMERA3_TEMPLATE_*)
2396 *
2397 *
2398 * RETURN     : success: camera_metadata_t*
2399 *              failure: NULL
2400 *
2401 *==========================================================================*/
2402camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2403{
2404    pthread_mutex_lock(&mMutex);
2405
2406    if (mDefaultMetadata[type] != NULL) {
2407        pthread_mutex_unlock(&mMutex);
2408        return mDefaultMetadata[type];
2409    }
2410    //first time we are handling this request
2411    //fill up the metadata structure using the wrapper class
2412    CameraMetadata settings;
2413    //translate from cam_capability_t to camera_metadata_tag_t
2414    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2415    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2416
2417    /*control*/
2418
2419    uint8_t controlIntent = 0;
2420    switch (type) {
2421      case CAMERA3_TEMPLATE_PREVIEW:
2422        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2423        break;
2424      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2425        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2426        break;
2427      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2428        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2429        break;
2430      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2431        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2432        break;
2433      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2434        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2435        break;
2436      default:
2437        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2438        break;
2439    }
2440    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2441
2442    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2443            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2444
2445    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2446    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2447
2448    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2449    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2450
2451    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2452    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2453
2454    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2455    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2456
2457    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2458    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2459
2460    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2461    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2462
2463    static uint8_t focusMode;
2464    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2465        ALOGE("%s: Setting focus mode to auto", __func__);
2466        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2467    } else {
2468        ALOGE("%s: Setting focus mode to off", __func__);
2469        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2470    }
2471    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2472
2473    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2474    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2475
2476    /*flash*/
2477    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2478    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2479
2480
2481    /* lens */
2482    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2483    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2484
2485    if (gCamCapability[mCameraId]->filter_densities_count) {
2486        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2487        settings.update(ANDROID_LENS_FILTER_DENSITY,
2488                        &default_filter_density, 1);
2489    }
2490
2491    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2492    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2493
2494    mDefaultMetadata[type] = settings.release();
2495
2496    pthread_mutex_unlock(&mMutex);
2497    return mDefaultMetadata[type];
2498}
2499
2500/*===========================================================================
2501 * FUNCTION   : setFrameParameters
2502 *
2503 * DESCRIPTION: set parameters per frame as requested in the metadata from
2504 *              framework
2505 *
2506 * PARAMETERS :
2507 *   @frame_id  : frame number of the request
2508 *   @settings  : frame settings information from framework
2509 *
2510 * RETURN     : success: NO_ERROR
2511 *              failure: BAD_VALUE
2512 *==========================================================================*/
2513int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2514                                                  const camera_metadata_t *settings)
2515{
2516    /*translate from camera_metadata_t type to parm_type_t*/
2517    int rc = 0;
2518    if (settings == NULL && mFirstRequest) {
2519        /*settings cannot be null for the first request*/
2520        return BAD_VALUE;
2521    }
2522
2523    int32_t hal_version = CAM_HAL_V3;
2524
2525    memset(mParameters, 0, sizeof(parm_buffer_t));
2526    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2527    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2528                sizeof(hal_version), &hal_version);
2529
2530    /*we need to update the frame number in the parameters*/
2531    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2532                                sizeof(frame_id), &frame_id);
2533    if (rc < 0) {
2534        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2535        return BAD_VALUE;
2536    }
2537
2538    if(settings != NULL){
2539        rc = translateMetadataToParameters(settings);
2540    }
2541    /*set the parameters to backend*/
2542    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2543    return rc;
2544}
2545
2546/*===========================================================================
2547 * FUNCTION   : translateMetadataToParameters
2548 *
2549 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2550 *
2551 *
2552 * PARAMETERS :
2553 *   @settings  : frame settings information from framework
2554 *
2555 *
2556 * RETURN     : success: NO_ERROR
2557 *              failure: non-zero error code
2558 *==========================================================================*/
2559int QCamera3HardwareInterface::translateMetadataToParameters
2560                                  (const camera_metadata_t *settings)
2561{
2562    int rc = 0;
2563    CameraMetadata frame_settings;
2564    frame_settings = settings;
2565
2566
2567    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2568        int32_t antibandingMode =
2569            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2570        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2571                sizeof(antibandingMode), &antibandingMode);
2572    }
2573
2574    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2575        int32_t expCompensation = frame_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2576        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2577          sizeof(expCompensation), &expCompensation);
2578    }
2579
2580    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2581        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2582        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2583                sizeof(aeLock), &aeLock);
2584    }
2585    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2586        cam_fps_range_t fps_range;
2587        fps_range.min_fps =
2588            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2589        fps_range.max_fps =
2590            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2591        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2592                sizeof(fps_range), &fps_range);
2593    }
2594
2595    float focalDistance = -1.0;
2596    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2597        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2598        rc = AddSetParmEntryToBatch(mParameters,
2599                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2600                sizeof(focalDistance), &focalDistance);
2601    }
2602
2603    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2604        uint8_t fwk_focusMode =
2605            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2606        uint8_t focusMode;
2607        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2608            focusMode = CAM_FOCUS_MODE_INFINITY;
2609        } else {
2610            focusMode = lookupHalName(FOCUS_MODES_MAP,
2611                                      sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2612                                      fwk_focusMode);
2613        }
2614        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2615                sizeof(focusMode), &focusMode);
2616    }
2617
2618    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2619        uint8_t awbLock =
2620            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2621        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2622                sizeof(awbLock), &awbLock);
2623    }
2624
2625    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2626        uint8_t fwk_whiteLevel =
2627            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2628        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2629                sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2630                fwk_whiteLevel);
2631        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2632                sizeof(whiteLevel), &whiteLevel);
2633    }
2634
2635    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2636        uint8_t fwk_effectMode =
2637            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2638        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2639                sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2640                fwk_effectMode);
2641        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2642                sizeof(effectMode), &effectMode);
2643    }
2644
2645    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2646        uint8_t fwk_aeMode =
2647            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2648        uint8_t aeMode;
2649        int32_t redeye;
2650        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2651            aeMode = CAM_AE_MODE_OFF;
2652        } else {
2653            aeMode = CAM_AE_MODE_ON;
2654        }
2655        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2656            redeye = 1;
2657        } else {
2658            redeye = 0;
2659        }
2660        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2661                                          sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
2662                                          fwk_aeMode);
2663        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2664                sizeof(aeMode), &aeMode);
2665        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2666                sizeof(flashMode), &flashMode);
2667        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2668                sizeof(redeye), &redeye);
2669    }
2670
2671    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2672        uint8_t colorCorrectMode =
2673            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2674        rc =
2675            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2676                    sizeof(colorCorrectMode), &colorCorrectMode);
2677    }
2678    cam_trigger_t aecTrigger;
2679    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2680    aecTrigger.trigger_id = -1;
2681    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2682        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2683        aecTrigger.trigger =
2684            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2685        aecTrigger.trigger_id =
2686            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2687    }
2688    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2689                                sizeof(aecTrigger), &aecTrigger);
2690
2691    /*af_trigger must come with a trigger id*/
2692    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2693        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2694        cam_trigger_t af_trigger;
2695        af_trigger.trigger =
2696            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2697        af_trigger.trigger_id =
2698            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2699        rc = AddSetParmEntryToBatch(mParameters,
2700                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2701    }
2702
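    /* ANDROID_CONTROL_MODE selects between scene-mode ("bestshot") operation
     * and normal 3A control; both OFF and AUTO disable the HAL scene mode. */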
2703    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2704        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2705        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2706                sizeof(metaMode), &metaMode);
2707        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2708           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2709           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2710                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2711                                             fwk_sceneMode);
2712           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2713                sizeof(sceneMode), &sceneMode);
2714        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
2715           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2716           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2717                sizeof(sceneMode), &sceneMode);
2718        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
2719           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2720           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2721                sizeof(sceneMode), &sceneMode);
2722        }
2723    }
2724
2725    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2726        int32_t demosaic =
2727            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2728        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2729                sizeof(demosaic), &demosaic);
2730    }
2731
2732    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2733        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2734        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE,
2735                sizeof(edgeMode), &edgeMode);
2736    }
2737
2738    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2739        int32_t edgeStrength =
2740            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2741        rc = AddSetParmEntryToBatch(mParameters,
2742                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2743    }
2744
2745    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2746        uint8_t flashMode =
2747            frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2748        rc = AddSetParmEntryToBatch(mParameters,
2749                CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode);
2750    }
2751
2752    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2753        uint8_t flashPower =
2754            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2755        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2756                sizeof(flashPower), &flashPower);
2757    }
2758
2759    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2760        int64_t flashFiringTime =
2761            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2762        rc = AddSetParmEntryToBatch(mParameters,
2763                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2764    }
2765
2766    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2767        uint8_t geometricMode =
2768            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2769        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2770                sizeof(geometricMode), &geometricMode);
2771    }
2772
2773    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2774        uint8_t geometricStrength =
2775            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2776        rc = AddSetParmEntryToBatch(mParameters,
2777                CAM_INTF_META_GEOMETRIC_STRENGTH,
2778                sizeof(geometricStrength), &geometricStrength);
2779    }
2780
2781    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2782        uint8_t hotPixelMode =
2783            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2784        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2785                sizeof(hotPixelMode), &hotPixelMode);
2786    }
2787
2788    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2789        float lensAperture =
2790            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2791        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2792                sizeof(lensAperture), &lensAperture);
2793    }
2794
2795    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2796        float filterDensity =
2797            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2798        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2799                sizeof(filterDensity), &filterDensity);
2800    }
2801
2802    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2803        float focalLength =
2804            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2805        rc = AddSetParmEntryToBatch(mParameters,
2806                CAM_INTF_META_LENS_FOCAL_LENGTH,
2807                sizeof(focalLength), &focalLength);
2808    }
2809
2810    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2811        uint8_t optStabMode =
2812            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2813        rc = AddSetParmEntryToBatch(mParameters,
2814                CAM_INTF_META_LENS_OPT_STAB_MODE,
2815                sizeof(optStabMode), &optStabMode);
2816    }
2817
2818    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2819        uint8_t noiseRedMode =
2820            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2821        rc = AddSetParmEntryToBatch(mParameters,
2822                CAM_INTF_META_NOISE_REDUCTION_MODE,
2823                sizeof(noiseRedMode), &noiseRedMode);
2824    }
2825
2826    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2827        uint8_t noiseRedStrength =
2828            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2829        rc = AddSetParmEntryToBatch(mParameters,
2830                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2831                sizeof(noiseRedStrength), &noiseRedStrength);
2832    }
2833
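    /* Cache the scaler crop region, if present, so the AE/AF/AWB metering
     * regions below can be validated against it. */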
2834    cam_crop_region_t scalerCropRegion;
2835    bool scalerCropSet = false;
2836    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2837        scalerCropRegion.left =
2838            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2839        scalerCropRegion.top =
2840            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2841        scalerCropRegion.width =
2842            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2843        scalerCropRegion.height =
2844            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2845        rc = AddSetParmEntryToBatch(mParameters,
2846                CAM_INTF_META_SCALER_CROP_REGION,
2847                sizeof(scalerCropRegion), &scalerCropRegion);
2848        scalerCropSet = true;
2849    }
2850
2851    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2852        int64_t sensorExpTime =
2853            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2854        rc = AddSetParmEntryToBatch(mParameters,
2855                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2856                sizeof(sensorExpTime), &sensorExpTime);
2857    }
2858
2859    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2860        int64_t sensorFrameDuration =
2861            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2862        rc = AddSetParmEntryToBatch(mParameters,
2863                CAM_INTF_META_SENSOR_FRAME_DURATION,
2864                sizeof(sensorFrameDuration), &sensorFrameDuration);
2865    }
2866
2867    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2868        int32_t sensorSensitivity =
2869            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2870        rc = AddSetParmEntryToBatch(mParameters,
2871                CAM_INTF_META_SENSOR_SENSITIVITY,
2872                sizeof(sensorSensitivity), &sensorSensitivity);
2873    }
2874
2875    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
2876        int32_t shadingMode =
2877            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
2878        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
2879                sizeof(shadingMode), &shadingMode);
2880    }
2881
2882    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
2883        uint8_t shadingStrength =
2884            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
2885        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
2886                sizeof(shadingStrength), &shadingStrength);
2887    }
2888
2889    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
2890        uint8_t facedetectMode =
2891            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
2892        rc = AddSetParmEntryToBatch(mParameters,
2893                CAM_INTF_META_STATS_FACEDETECT_MODE,
2894                sizeof(facedetectMode), &facedetectMode);
2895    }
2896
2897    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
2898        uint8_t histogramMode =
2899            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
2900        rc = AddSetParmEntryToBatch(mParameters,
2901                CAM_INTF_META_STATS_HISTOGRAM_MODE,
2902                sizeof(histogramMode), &histogramMode);
2903    }
2904
2905    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
2906        uint8_t sharpnessMapMode =
2907            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
2908        rc = AddSetParmEntryToBatch(mParameters,
2909                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
2910                sizeof(sharpnessMapMode), &sharpnessMapMode);
2911    }
2912
2913    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
2914        uint8_t tonemapMode =
2915            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
2916        rc = AddSetParmEntryToBatch(mParameters,
2917                CAM_INTF_META_TONEMAP_MODE,
2918                sizeof(tonemapMode), &tonemapMode);
2919    }
2920
2921    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2922        uint8_t captureIntent =
2923            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2924        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2925                sizeof(captureIntent), &captureIntent);
2926    }
2927
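    /* Metering regions: each framework region is converted to a cam_area_t,
     * checked against the cached crop region when one was set, and dropped
     * if it falls completely outside of it. */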
2928    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
2929        cam_area_t roi;
2930        bool reset = true;
2931        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
2932        if (scalerCropSet) {
2933            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2934        }
2935        if (reset) {
2936            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
2937                    sizeof(roi), &roi);
2938        }
2939    }
2940
2941    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
2942        cam_area_t roi;
2943        bool reset = true;
2944        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
2945        if (scalerCropSet) {
2946            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2947        }
2948        if (reset) {
2949            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
2950                    sizeof(roi), &roi);
2951        }
2952    }
2953
2954    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
2955        cam_area_t roi;
2956        bool reset = true;
2957        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
2958        if (scalerCropSet) {
2959            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2960        }
2961        if (reset) {
2962            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
2963                    sizeof(roi), &roi);
2964        }
2965    }
2966    return rc;
2967}
2968
2969/*===========================================================================
2970 * FUNCTION   : getJpegSettings
2971 *
2972 * DESCRIPTION: save the jpeg settings in the HAL
2973 *
2974 *
2975 * PARAMETERS :
2976 *   @settings  : frame settings information from framework
2977 *
2978 *
2979 * RETURN     : success: NO_ERROR
2980 *              failure: NO_MEMORY (if the settings buffer cannot be allocated)
2981 *==========================================================================*/
2982int QCamera3HardwareInterface::getJpegSettings
2983                                  (const camera_metadata_t *settings)
2984{
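    /* Release any previously cached JPEG settings, including the per-field
     * GPS allocations, before parsing the new request. */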
2985    if (mJpegSettings) {
2986        if (mJpegSettings->gps_timestamp) {
2987            free(mJpegSettings->gps_timestamp);
2988            mJpegSettings->gps_timestamp = NULL;
2989        }
2990        if (mJpegSettings->gps_coordinates) {
2991            for (int i = 0; i < 3; i++) {
2992                free(mJpegSettings->gps_coordinates[i]);
2993                mJpegSettings->gps_coordinates[i] = NULL;
2994            }
2995        }
2996        free(mJpegSettings);
2997        mJpegSettings = NULL;
2998    }
2999    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
    if (mJpegSettings == NULL) {
        ALOGE("%s: No memory for jpeg settings", __func__);
        return NO_MEMORY;
    }
3000    CameraMetadata jpeg_settings;
3001    jpeg_settings = settings;
3002
3003    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3004        mJpegSettings->jpeg_orientation =
3005            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3006    } else {
3007        mJpegSettings->jpeg_orientation = 0;
3008    }
3009    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3010        mJpegSettings->jpeg_quality =
3011            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3012    } else {
3013        mJpegSettings->jpeg_quality = 85;
3014    }
3015    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3016        mJpegSettings->thumbnail_size.width =
3017            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3018        mJpegSettings->thumbnail_size.height =
3019            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3020    } else {
3021        mJpegSettings->thumbnail_size.width = 0;
3022        mJpegSettings->thumbnail_size.height = 0;
3023    }
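    /* GPS coordinates arrive as three doubles (latitude, longitude, altitude);
     * each is stored behind its own allocation, or left NULL when absent. */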
3024    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3025        for (int i = 0; i < 3; i++) {
3026            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double));
3027            *(mJpegSettings->gps_coordinates[i]) =
3028                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3029        }
3030    } else{
3031       for (int i = 0; i < 3; i++) {
3032            mJpegSettings->gps_coordinates[i] = NULL;
3033        }
3034    }
3035
3036    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3037        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t));
3038        *(mJpegSettings->gps_timestamp) =
3039            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3040    } else {
3041        mJpegSettings->gps_timestamp = NULL;
3042    }
3043
3044    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3045        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3046        for (int i = 0; i < len; i++) {
3047            mJpegSettings->gps_processing_method[i] =
3048                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3049        }
3050        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3051            mJpegSettings->gps_processing_method[len] = '\0';
3052        }
3053    } else {
3054        mJpegSettings->gps_processing_method[0] = '\0';
3055    }
3056
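    /* Fall back to the ISO reported in the most recent metadata when the
     * request does not carry an explicit sensitivity. */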
3057    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3058        mJpegSettings->sensor_sensitivity =
3059            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3060    } else {
3061        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3062    }
3063
3064    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3065        mJpegSettings->lens_focal_length =
3066            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3067    }
3068    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3069        mJpegSettings->exposure_compensation =
3070            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3071    }
3072    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3073    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3074    return 0;
3075}
3076
3077/*===========================================================================
3078 * FUNCTION   : captureResultCb
3079 *
3080 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3081 *
3082 * PARAMETERS :
3083 *   @frame  : frame information from mm-camera-interface
3084 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3085 *   @userdata: userdata
3086 *
3087 * RETURN     : NONE
3088 *==========================================================================*/
3089void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3090                camera3_stream_buffer_t *buffer,
3091                uint32_t frame_number, void *userdata)
3092{
3093    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3094    if (hw == NULL) {
3095        ALOGE("%s: Invalid hw %p", __func__, hw);
3096        return;
3097    }
3098
3099    hw->captureResultCb(metadata, buffer, frame_number);
3100    return;
3101}
3102
3103/*===========================================================================
3104 * FUNCTION   : initialize
3105 *
3106 * DESCRIPTION: Pass framework callback pointers to HAL
3107 *
3108 * PARAMETERS :
3109 *   @device       : camera3 device handle
3110 *   @callback_ops : framework callback function pointers
3111 * RETURN     : Success : 0
3112 *              Failure: -ENODEV
3113 *==========================================================================*/
3114
3115int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3116                                  const camera3_callback_ops_t *callback_ops)
3117{
3118    ALOGV("%s: E", __func__);
3119    QCamera3HardwareInterface *hw =
3120        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3121    if (!hw) {
3122        ALOGE("%s: NULL camera device", __func__);
3123        return -ENODEV;
3124    }
3125
3126    int rc = hw->initialize(callback_ops);
3127    ALOGV("%s: X", __func__);
3128    return rc;
3129}
3130
3131/*===========================================================================
3132 * FUNCTION   : configure_streams
3133 *
3134 * DESCRIPTION: Entry point from the camera framework to configure the set of
3135 *              output streams used by subsequent capture requests
3136 * PARAMETERS :
3137 *   @device      : camera3 device handle
3138 *   @stream_list : set of streams to be configured
3139 * RETURN     : Success: 0
3140 *              Failure: -EINVAL (if stream configuration is invalid)
3141 *                       -ENODEV (fatal error)
3142 *==========================================================================*/
3143
3144int QCamera3HardwareInterface::configure_streams(
3145        const struct camera3_device *device,
3146        camera3_stream_configuration_t *stream_list)
3147{
3148    ALOGV("%s: E", __func__);
3149    QCamera3HardwareInterface *hw =
3150        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3151    if (!hw) {
3152        ALOGE("%s: NULL camera device", __func__);
3153        return -ENODEV;
3154    }
3155    int rc = hw->configureStreams(stream_list);
3156    ALOGV("%s: X", __func__);
3157    return rc;
3158}
3159
3160/*===========================================================================
3161 * FUNCTION   : register_stream_buffers
3162 *
3163 * DESCRIPTION: Register stream buffers with the device
3164 *
3165 * PARAMETERS :
3166 *   @device, @buffer_set : camera3 device handle and the buffers to register for one configured stream
3167 * RETURN     : 0 on success; non-zero error code on failure (-ENODEV for a NULL device)
3168 *==========================================================================*/
3169int QCamera3HardwareInterface::register_stream_buffers(
3170        const struct camera3_device *device,
3171        const camera3_stream_buffer_set_t *buffer_set)
3172{
3173    ALOGV("%s: E", __func__);
3174    QCamera3HardwareInterface *hw =
3175        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3176    if (!hw) {
3177        ALOGE("%s: NULL camera device", __func__);
3178        return -ENODEV;
3179    }
3180    int rc = hw->registerStreamBuffers(buffer_set);
3181    ALOGV("%s: X", __func__);
3182    return rc;
3183}
3184
3185/*===========================================================================
3186 * FUNCTION   : construct_default_request_settings
3187 *
3188 * DESCRIPTION: Configure a settings buffer to meet the required use case
3189 *
3190 * PARAMETERS :
3191 *   @device : camera3 device handle
3192 *   @type   : requested template (preview, still capture, video, etc.)
3193 * RETURN     : Success: Return valid metadata
3194 *              Failure: Return NULL
3195 *==========================================================================*/
3196const camera_metadata_t* QCamera3HardwareInterface::
3197    construct_default_request_settings(const struct camera3_device *device,
3198                                        int type)
3199{
3200
3201    ALOGV("%s: E", __func__);
3202    camera_metadata_t* fwk_metadata = NULL;
3203    QCamera3HardwareInterface *hw =
3204        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3205    if (!hw) {
3206        ALOGE("%s: NULL camera device", __func__);
3207        return NULL;
3208    }
3209
3210    fwk_metadata = hw->translateCapabilityToMetadata(type);
3211
3212    ALOGV("%s: X", __func__);
3213    return fwk_metadata;
3214}
3215
3216/*===========================================================================
3217 * FUNCTION   : process_capture_request
3218 *
3219 * DESCRIPTION: Entry point from the camera framework to queue a single capture request
3220 *
3221 * PARAMETERS :
3222 *   @device  : camera3 device handle
3223 *   @request : capture request containing settings and output buffers
3224 * RETURN     : 0 on success; non-zero error code on failure (-EINVAL for a NULL device)
3225 *==========================================================================*/
3226int QCamera3HardwareInterface::process_capture_request(
3227                    const struct camera3_device *device,
3228                    camera3_capture_request_t *request)
3229{
3230    ALOGV("%s: E", __func__);
3231    QCamera3HardwareInterface *hw =
3232        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3233    if (!hw) {
3234        ALOGE("%s: NULL camera device", __func__);
3235        return -EINVAL;
3236    }
3237
3238    int rc = hw->processCaptureRequest(request);
3239    ALOGV("%s: X", __func__);
3240    return rc;
3241}
3242
3243/*===========================================================================
3244 * FUNCTION   : get_metadata_vendor_tag_ops
3245 *
3246 * DESCRIPTION: Pass the HAL's vendor tag query operations to the framework
3247 *
3248 * PARAMETERS :
3249 *   @device : camera3 device handle
3250 *   @ops    : vendor tag query ops structure to be filled in by the HAL
3251 * RETURN     : none
3252 *==========================================================================*/
3253
3254void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3255                const struct camera3_device *device,
3256                vendor_tag_query_ops_t* ops)
3257{
3258    ALOGV("%s: E", __func__);
3259    QCamera3HardwareInterface *hw =
3260        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3261    if (!hw) {
3262        ALOGE("%s: NULL camera device", __func__);
3263        return;
3264    }
3265
3266    hw->getMetadataVendorTagOps(ops);
3267    ALOGV("%s: X", __func__);
3268    return;
3269}
3270
3271/*===========================================================================
3272 * FUNCTION   : dump
3273 *
3274 * DESCRIPTION: Dump the HAL's internal state for debugging
3275 *
3276 * PARAMETERS :
3277 *   @device : camera3 device handle
3278 *   @fd     : file descriptor to write the dump output to
3279 * RETURN     : none
3280 *==========================================================================*/
3281
3282void QCamera3HardwareInterface::dump(
3283                const struct camera3_device *device, int fd)
3284{
3285    ALOGV("%s: E", __func__);
3286    QCamera3HardwareInterface *hw =
3287        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3288    if (!hw) {
3289        ALOGE("%s: NULL camera device", __func__);
3290        return;
3291    }
3292
3293    hw->dump(fd);
3294    ALOGV("%s: X", __func__);
3295    return;
3296}
3297
3298/*===========================================================================
3299 * FUNCTION   : close_camera_device
3300 *
3301 * DESCRIPTION: Close the camera device and release the active camera session
3302 *
3303 * PARAMETERS :
3304 *   @device : hw_device_t handle of the camera device to close
3305 *
3306 * RETURN     : NO_ERROR on success; BAD_VALUE if the device handle is invalid
3307 *==========================================================================*/
3308int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3309{
3310    ALOGV("%s: E", __func__);
3311    int ret = NO_ERROR;
3312    QCamera3HardwareInterface *hw =
3313        reinterpret_cast<QCamera3HardwareInterface *>(
3314            reinterpret_cast<camera3_device_t *>(device)->priv);
3315    if (!hw) {
3316        ALOGE("NULL camera device");
3317        return BAD_VALUE;
3318    }
3319    delete hw;
3320
3321    pthread_mutex_lock(&mCameraSessionLock);
3322    mCameraSessionActive = 0;
3323    pthread_mutex_unlock(&mCameraSessionLock);
3324    ALOGV("%s: X", __func__);
3325    return ret;
3326}
3327
3328}; //end namespace qcamera
3329
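/*===========================================================================
 * Illustrative sketch (assumes the standard camera3 HAL wiring; the table
 * name camera_ops below is hypothetical, and the actual table used by this
 * HAL is defined elsewhere in the file):
 *
 * The static functions above are the camera3 entry points invoked by the
 * framework through a camera3_device_ops table, roughly as follows:
 *
 *   static camera3_device_ops_t camera_ops = {
 *       .initialize                         = QCamera3HardwareInterface::initialize,
 *       .configure_streams                  = QCamera3HardwareInterface::configure_streams,
 *       .register_stream_buffers            = QCamera3HardwareInterface::register_stream_buffers,
 *       .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
 *       .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
 *       .get_metadata_vendor_tag_ops        = QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
 *       .dump                               = QCamera3HardwareInterface::dump,
 *   };
 *
 * close_camera_device() is not part of camera3_device_ops; it is installed
 * as the hw_device_t close handler when the device is opened.
 *==========================================================================*/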