QCamera3HWI.cpp revision 49542da1f1db437888282ee70a89e5c1a0b03947
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
49cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
50parm_buffer_t *prevSettings;
51const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
52
53pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
54    PTHREAD_MUTEX_INITIALIZER;
55unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
56
57const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
58    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
59    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
60    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
61    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
62    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
63    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
64    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
65    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
66    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
67};
68
69const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
70    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
71    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
72    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
73    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
74    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
75    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
76    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
77    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
78    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
79};
80
81const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
82    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
83    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
84    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
85    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
86    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
87    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
88    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
89    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
90    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
91    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
92    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
93    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
94    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
95    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
96    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
97};
98
99const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
100    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
101    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
102    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
103    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
104    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
105    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
106};
107
108const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
109    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
110    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
111    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
112    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
113};
114
115const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
116    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
117    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
118    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
119    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
120    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
121};
122
123const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
124    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
125    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
126    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
127};
128
129const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
130                                             320, 240, 176, 144, 0, 0};
131
132camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
133    initialize:                         QCamera3HardwareInterface::initialize,
134    configure_streams:                  QCamera3HardwareInterface::configure_streams,
135    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
136    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
137    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
138    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
139    dump:                               QCamera3HardwareInterface::dump,
140};
141
142
143/*===========================================================================
144 * FUNCTION   : QCamera3HardwareInterface
145 *
146 * DESCRIPTION: constructor of QCamera3HardwareInterface
147 *
148 * PARAMETERS :
149 *   @cameraId  : camera ID
150 *
151 * RETURN     : none
152 *==========================================================================*/
153QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
154    : mCameraId(cameraId),
155      mCameraHandle(NULL),
156      mCameraOpened(false),
157      mCameraInitialized(false),
158      mCallbackOps(NULL),
159      mInputStream(NULL),
160      mMetadataChannel(NULL),
161      mPictureChannel(NULL),
162      mFirstRequest(false),
163      mParamHeap(NULL),
164      mParameters(NULL),
165      mJpegSettings(NULL),
166      mIsZslMode(false),
167      m_pPowerModule(NULL)
168{
169    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
170    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
171    mCameraDevice.common.close = close_camera_device;
172    mCameraDevice.ops = &mCameraOps;
173    mCameraDevice.priv = this;
174    gCamCapability[cameraId]->version = CAM_HAL_V3;
175    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
176    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
177    gCamCapability[cameraId]->min_num_pp_bufs = 3;
178
179    pthread_cond_init(&mRequestCond, NULL);
180    mPendingRequest = 0;
181    mCurrentRequestId = -1;
182    pthread_mutex_init(&mMutex, NULL);
183
184    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
185        mDefaultMetadata[i] = NULL;
186
187#ifdef HAS_MULTIMEDIA_HINTS
188    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
189        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
190    }
191#endif
192}
193
194/*===========================================================================
195 * FUNCTION   : ~QCamera3HardwareInterface
196 *
197 * DESCRIPTION: destructor of QCamera3HardwareInterface
198 *
199 * PARAMETERS : none
200 *
201 * RETURN     : none
202 *==========================================================================*/
203QCamera3HardwareInterface::~QCamera3HardwareInterface()
204{
205    ALOGV("%s: E", __func__);
206    /* We need to stop all streams before deleting any stream */
207    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
208        it != mStreamInfo.end(); it++) {
209        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
210        if (channel)
211           channel->stop();
212    }
213    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
214        it != mStreamInfo.end(); it++) {
215        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
216        if (channel)
217            delete channel;
218        free (*it);
219    }
220
221    mPictureChannel = NULL;
222
223    if (mJpegSettings != NULL) {
224        free(mJpegSettings);
225        mJpegSettings = NULL;
226    }
227
228    /* Clean up all channels */
229    if (mCameraInitialized) {
230        mMetadataChannel->stop();
231        delete mMetadataChannel;
232        mMetadataChannel = NULL;
233        deinitParameters();
234    }
235
236    if (mCameraOpened)
237        closeCamera();
238
239    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
240        if (mDefaultMetadata[i])
241            free_camera_metadata(mDefaultMetadata[i]);
242
243    pthread_cond_destroy(&mRequestCond);
244
245    pthread_mutex_destroy(&mMutex);
246    ALOGV("%s: X", __func__);
247}
248
249/*===========================================================================
250 * FUNCTION   : openCamera
251 *
252 * DESCRIPTION: open camera
253 *
254 * PARAMETERS :
255 *   @hw_device  : double ptr for camera device struct
256 *
257 * RETURN     : int32_t type of status
258 *              NO_ERROR  -- success
259 *              none-zero failure code
260 *==========================================================================*/
261int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
262{
263    int rc = 0;
264    pthread_mutex_lock(&mCameraSessionLock);
265    if (mCameraSessionActive) {
266        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
267        pthread_mutex_unlock(&mCameraSessionLock);
268        return INVALID_OPERATION;
269    }
270
271    if (mCameraOpened) {
272        *hw_device = NULL;
273        return PERMISSION_DENIED;
274    }
275
276    rc = openCamera();
277    if (rc == 0) {
278        *hw_device = &mCameraDevice.common;
279        mCameraSessionActive = 1;
280    } else
281        *hw_device = NULL;
282
283#ifdef HAS_MULTIMEDIA_HINTS
284    if (rc == 0) {
285        if (m_pPowerModule) {
286            if (m_pPowerModule->powerHint) {
287                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
288                        (void *)"state=1");
289            }
290        }
291    }
292#endif
293    pthread_mutex_unlock(&mCameraSessionLock);
294    return rc;
295}
296
297/*===========================================================================
298 * FUNCTION   : openCamera
299 *
300 * DESCRIPTION: open camera
301 *
302 * PARAMETERS : none
303 *
304 * RETURN     : int32_t type of status
305 *              NO_ERROR  -- success
306 *              none-zero failure code
307 *==========================================================================*/
308int QCamera3HardwareInterface::openCamera()
309{
310    if (mCameraHandle) {
311        ALOGE("Failure: Camera already opened");
312        return ALREADY_EXISTS;
313    }
314    mCameraHandle = camera_open(mCameraId);
315    if (!mCameraHandle) {
316        ALOGE("camera_open failed.");
317        return UNKNOWN_ERROR;
318    }
319
320    mCameraOpened = true;
321
322    return NO_ERROR;
323}
324
325/*===========================================================================
326 * FUNCTION   : closeCamera
327 *
328 * DESCRIPTION: close camera
329 *
330 * PARAMETERS : none
331 *
332 * RETURN     : int32_t type of status
333 *              NO_ERROR  -- success
334 *              none-zero failure code
335 *==========================================================================*/
336int QCamera3HardwareInterface::closeCamera()
337{
338    int rc = NO_ERROR;
339
340    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
341    mCameraHandle = NULL;
342    mCameraOpened = false;
343
344#ifdef HAS_MULTIMEDIA_HINTS
345    if (rc == NO_ERROR) {
346        if (m_pPowerModule) {
347            if (m_pPowerModule->powerHint) {
348                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
349                        (void *)"state=0");
350            }
351        }
352    }
353#endif
354
355    return rc;
356}
357
358/*===========================================================================
359 * FUNCTION   : initialize
360 *
361 * DESCRIPTION: Initialize frameworks callback functions
362 *
363 * PARAMETERS :
364 *   @callback_ops : callback function to frameworks
365 *
366 * RETURN     :
367 *
368 *==========================================================================*/
369int QCamera3HardwareInterface::initialize(
370        const struct camera3_callback_ops *callback_ops)
371{
372    int rc;
373
374    pthread_mutex_lock(&mMutex);
375
376    rc = initParameters();
377    if (rc < 0) {
378        ALOGE("%s: initParamters failed %d", __func__, rc);
379       goto err1;
380    }
381    //Create metadata channel and initialize it
382    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
383                    mCameraHandle->ops, captureResultCb,
384                    &gCamCapability[mCameraId]->padding_info, this);
385    if (mMetadataChannel == NULL) {
386        ALOGE("%s: failed to allocate metadata channel", __func__);
387        rc = -ENOMEM;
388        goto err2;
389    }
390    rc = mMetadataChannel->initialize();
391    if (rc < 0) {
392        ALOGE("%s: metadata channel initialization failed", __func__);
393        goto err3;
394    }
395
396    mCallbackOps = callback_ops;
397
398    pthread_mutex_unlock(&mMutex);
399    mCameraInitialized = true;
400    return 0;
401
402err3:
403    delete mMetadataChannel;
404    mMetadataChannel = NULL;
405err2:
406    deinitParameters();
407err1:
408    pthread_mutex_unlock(&mMutex);
409    return rc;
410}
411
412/*===========================================================================
413 * FUNCTION   : configureStreams
414 *
415 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
416 *              and output streams.
417 *
418 * PARAMETERS :
419 *   @stream_list : streams to be configured
420 *
421 * RETURN     :
422 *
423 *==========================================================================*/
424int QCamera3HardwareInterface::configureStreams(
425        camera3_stream_configuration_t *streamList)
426{
427    int rc = 0;
428    pthread_mutex_lock(&mMutex);
429    // Sanity check stream_list
430    if (streamList == NULL) {
431        ALOGE("%s: NULL stream configuration", __func__);
432        pthread_mutex_unlock(&mMutex);
433        return BAD_VALUE;
434    }
435
436    if (streamList->streams == NULL) {
437        ALOGE("%s: NULL stream list", __func__);
438        pthread_mutex_unlock(&mMutex);
439        return BAD_VALUE;
440    }
441
442    if (streamList->num_streams < 1) {
443        ALOGE("%s: Bad number of streams requested: %d", __func__,
444                streamList->num_streams);
445        pthread_mutex_unlock(&mMutex);
446        return BAD_VALUE;
447    }
448
449    camera3_stream_t *inputStream = NULL;
450    camera3_stream_t *jpegStream = NULL;
451    /* first invalidate all the steams in the mStreamList
452     * if they appear again, they will be validated */
453    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
454            it != mStreamInfo.end(); it++) {
455        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
456        channel->stop();
457        (*it)->status = INVALID;
458    }
459
460    for (size_t i = 0; i < streamList->num_streams; i++) {
461        camera3_stream_t *newStream = streamList->streams[i];
462        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
463                __func__, newStream->stream_type, newStream->format,
464                 newStream->width, newStream->height);
465        //if the stream is in the mStreamList validate it
466        bool stream_exists = false;
467        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
468                it != mStreamInfo.end(); it++) {
469            if ((*it)->stream == newStream) {
470                QCamera3Channel *channel =
471                    (QCamera3Channel*)(*it)->stream->priv;
472                stream_exists = true;
473                (*it)->status = RECONFIGURE;
474                /*delete the channel object associated with the stream because
475                  we need to reconfigure*/
476                delete channel;
477                (*it)->stream->priv = NULL;
478            }
479        }
480        if (!stream_exists) {
481            //new stream
482            stream_info_t* stream_info;
483            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
484            stream_info->stream = newStream;
485            stream_info->status = VALID;
486            stream_info->registered = 0;
487            mStreamInfo.push_back(stream_info);
488        }
489        if (newStream->stream_type == CAMERA3_STREAM_INPUT
490                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
491            if (inputStream != NULL) {
492                ALOGE("%s: Multiple input streams requested!", __func__);
493                pthread_mutex_unlock(&mMutex);
494                return BAD_VALUE;
495            }
496            inputStream = newStream;
497        }
498        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
499            jpegStream = newStream;
500        }
501    }
502    mInputStream = inputStream;
503
504    /*clean up invalid streams*/
505    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
506            it != mStreamInfo.end();) {
507        if(((*it)->status) == INVALID){
508            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
509            delete channel;
510            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
511            free(*it);
512            it = mStreamInfo.erase(it);
513        } else {
514            it++;
515        }
516    }
517
518    //mMetadataChannel->stop();
519
520    /* Allocate channel objects for the requested streams */
521    for (size_t i = 0; i < streamList->num_streams; i++) {
522        camera3_stream_t *newStream = streamList->streams[i];
523        if (newStream->priv == NULL) {
524            //New stream, construct channel
525            switch (newStream->stream_type) {
526            case CAMERA3_STREAM_INPUT:
527                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
528                break;
529            case CAMERA3_STREAM_BIDIRECTIONAL:
530                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
531                    GRALLOC_USAGE_HW_CAMERA_WRITE;
532                break;
533            case CAMERA3_STREAM_OUTPUT:
534                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
535                break;
536            default:
537                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
538                break;
539            }
540
541            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
542                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
543                QCamera3Channel *channel;
544                switch (newStream->format) {
545                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
546                case HAL_PIXEL_FORMAT_YCbCr_420_888:
547                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
548                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
549                        jpegStream) {
550                        uint32_t width = jpegStream->width;
551                        uint32_t height = jpegStream->height;
552                        mIsZslMode = true;
553                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
554                            mCameraHandle->ops, captureResultCb,
555                            &gCamCapability[mCameraId]->padding_info, this, newStream,
556                            width, height);
557                    } else
558                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
559                            mCameraHandle->ops, captureResultCb,
560                            &gCamCapability[mCameraId]->padding_info, this, newStream);
561                    if (channel == NULL) {
562                        ALOGE("%s: allocation of channel failed", __func__);
563                        pthread_mutex_unlock(&mMutex);
564                        return -ENOMEM;
565                    }
566
567                    newStream->priv = channel;
568                    break;
569                case HAL_PIXEL_FORMAT_BLOB:
570                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
571                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
572                            mCameraHandle->ops, captureResultCb,
573                            &gCamCapability[mCameraId]->padding_info, this, newStream);
574                    if (mPictureChannel == NULL) {
575                        ALOGE("%s: allocation of channel failed", __func__);
576                        pthread_mutex_unlock(&mMutex);
577                        return -ENOMEM;
578                    }
579                    newStream->priv = (QCamera3Channel*)mPictureChannel;
580                    break;
581
582                //TODO: Add support for app consumed format?
583                default:
584                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
585                    break;
586                }
587            }
588        } else {
589            // Channel already exists for this stream
590            // Do nothing for now
591        }
592    }
593    /*For the streams to be reconfigured we need to register the buffers
594      since the framework wont*/
595    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
596            it != mStreamInfo.end(); it++) {
597        if ((*it)->status == RECONFIGURE) {
598            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
599            /*only register buffers for streams that have already been
600              registered*/
601            if ((*it)->registered) {
602                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
603                        (*it)->buffer_set.buffers);
604                if (rc != NO_ERROR) {
605                    ALOGE("%s: Failed to register the buffers of old stream,\
606                            rc = %d", __func__, rc);
607                }
608                ALOGV("%s: channel %p has %d buffers",
609                        __func__, channel, (*it)->buffer_set.num_buffers);
610            }
611        }
612
613        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
614        if (index == NAME_NOT_FOUND) {
615            mPendingBuffersMap.add((*it)->stream, 0);
616        } else {
617            mPendingBuffersMap.editValueAt(index) = 0;
618        }
619    }
620
621    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
622    mPendingRequestsList.clear();
623
624    //settings/parameters don't carry over for new configureStreams
625    memset(mParameters, 0, sizeof(parm_buffer_t));
626    mFirstRequest = true;
627
628    pthread_mutex_unlock(&mMutex);
629    return rc;
630}
631
632/*===========================================================================
633 * FUNCTION   : validateCaptureRequest
634 *
635 * DESCRIPTION: validate a capture request from camera service
636 *
637 * PARAMETERS :
638 *   @request : request from framework to process
639 *
640 * RETURN     :
641 *
642 *==========================================================================*/
643int QCamera3HardwareInterface::validateCaptureRequest(
644                    camera3_capture_request_t *request)
645{
646    ssize_t idx = 0;
647    const camera3_stream_buffer_t *b;
648    CameraMetadata meta;
649
650    /* Sanity check the request */
651    if (request == NULL) {
652        ALOGE("%s: NULL capture request", __func__);
653        return BAD_VALUE;
654    }
655
656    uint32_t frameNumber = request->frame_number;
657    if (request->input_buffer != NULL &&
658            request->input_buffer->stream != mInputStream) {
659        ALOGE("%s: Request %d: Input buffer not from input stream!",
660                __FUNCTION__, frameNumber);
661        return BAD_VALUE;
662    }
663    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
664        ALOGE("%s: Request %d: No output buffers provided!",
665                __FUNCTION__, frameNumber);
666        return BAD_VALUE;
667    }
668    if (request->input_buffer != NULL) {
669        b = request->input_buffer;
670        QCamera3Channel *channel =
671            static_cast<QCamera3Channel*>(b->stream->priv);
672        if (channel == NULL) {
673            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
674                    __func__, frameNumber, idx);
675            return BAD_VALUE;
676        }
677        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
678            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
679                    __func__, frameNumber, idx);
680            return BAD_VALUE;
681        }
682        if (b->release_fence != -1) {
683            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
684                    __func__, frameNumber, idx);
685            return BAD_VALUE;
686        }
687        if (b->buffer == NULL) {
688            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
689                    __func__, frameNumber, idx);
690            return BAD_VALUE;
691        }
692    }
693
694    // Validate all buffers
695    b = request->output_buffers;
696    do {
697        QCamera3Channel *channel =
698                static_cast<QCamera3Channel*>(b->stream->priv);
699        if (channel == NULL) {
700            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
701                    __func__, frameNumber, idx);
702            return BAD_VALUE;
703        }
704        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
705            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
706                    __func__, frameNumber, idx);
707            return BAD_VALUE;
708        }
709        if (b->release_fence != -1) {
710            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
711                    __func__, frameNumber, idx);
712            return BAD_VALUE;
713        }
714        if (b->buffer == NULL) {
715            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
716                    __func__, frameNumber, idx);
717            return BAD_VALUE;
718        }
719        idx++;
720        b = request->output_buffers + idx;
721    } while (idx < (ssize_t)request->num_output_buffers);
722
723    return NO_ERROR;
724}
725
726/*===========================================================================
727 * FUNCTION   : registerStreamBuffers
728 *
729 * DESCRIPTION: Register buffers for a given stream with the HAL device.
730 *
731 * PARAMETERS :
732 *   @stream_list : streams to be configured
733 *
734 * RETURN     :
735 *
736 *==========================================================================*/
737int QCamera3HardwareInterface::registerStreamBuffers(
738        const camera3_stream_buffer_set_t *buffer_set)
739{
740    int rc = 0;
741
742    pthread_mutex_lock(&mMutex);
743
744    if (buffer_set == NULL) {
745        ALOGE("%s: Invalid buffer_set parameter.", __func__);
746        pthread_mutex_unlock(&mMutex);
747        return -EINVAL;
748    }
749    if (buffer_set->stream == NULL) {
750        ALOGE("%s: Invalid stream parameter.", __func__);
751        pthread_mutex_unlock(&mMutex);
752        return -EINVAL;
753    }
754    if (buffer_set->num_buffers < 1) {
755        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
756        pthread_mutex_unlock(&mMutex);
757        return -EINVAL;
758    }
759    if (buffer_set->buffers == NULL) {
760        ALOGE("%s: Invalid buffers parameter.", __func__);
761        pthread_mutex_unlock(&mMutex);
762        return -EINVAL;
763    }
764
765    camera3_stream_t *stream = buffer_set->stream;
766    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
767
768    //set the buffer_set in the mStreamInfo array
769    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
770            it != mStreamInfo.end(); it++) {
771        if ((*it)->stream == stream) {
772            uint32_t numBuffers = buffer_set->num_buffers;
773            (*it)->buffer_set.stream = buffer_set->stream;
774            (*it)->buffer_set.num_buffers = numBuffers;
775            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
776            if ((*it)->buffer_set.buffers == NULL) {
777                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
778                pthread_mutex_unlock(&mMutex);
779                return -ENOMEM;
780            }
781            for (size_t j = 0; j < numBuffers; j++){
782                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
783            }
784            (*it)->registered = 1;
785        }
786    }
787    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
788    if (rc < 0) {
789        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
790        pthread_mutex_unlock(&mMutex);
791        return -ENODEV;
792    }
793
794    pthread_mutex_unlock(&mMutex);
795    return NO_ERROR;
796}
797
798/*===========================================================================
799 * FUNCTION   : processCaptureRequest
800 *
801 * DESCRIPTION: process a capture request from camera service
802 *
803 * PARAMETERS :
804 *   @request : request from framework to process
805 *
806 * RETURN     :
807 *
808 *==========================================================================*/
809int QCamera3HardwareInterface::processCaptureRequest(
810                    camera3_capture_request_t *request)
811{
812    int rc = NO_ERROR;
813    int32_t request_id;
814    CameraMetadata meta;
815
816    pthread_mutex_lock(&mMutex);
817
818    rc = validateCaptureRequest(request);
819    if (rc != NO_ERROR) {
820        ALOGE("%s: incoming request is not valid", __func__);
821        pthread_mutex_unlock(&mMutex);
822        return rc;
823    }
824
825    uint32_t frameNumber = request->frame_number;
826    uint32_t streamTypeMask = 0;
827
828    meta = request->settings;
829    if (meta.exists(ANDROID_REQUEST_ID)) {
830        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
831        mCurrentRequestId = request_id;
832        ALOGV("%s: Received request with id: %d",__func__, request_id);
833    } else if (mFirstRequest || mCurrentRequestId == -1){
834        ALOGE("%s: Unable to find request id field, \
835                & no previous id available", __func__);
836        return NAME_NOT_FOUND;
837    } else {
838        ALOGV("%s: Re-using old request id", __func__);
839        request_id = mCurrentRequestId;
840    }
841
842    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
843                                    __func__, __LINE__,
844                                    request->num_output_buffers,
845                                    request->input_buffer,
846                                    frameNumber);
847    // Acquire all request buffers first
848    int blob_request = 0;
849    for (size_t i = 0; i < request->num_output_buffers; i++) {
850        const camera3_stream_buffer_t& output = request->output_buffers[i];
851        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
852        sp<Fence> acquireFence = new Fence(output.acquire_fence);
853
854        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
855        //Call function to store local copy of jpeg data for encode params.
856            blob_request = 1;
857            rc = getJpegSettings(request->settings);
858            if (rc < 0) {
859                ALOGE("%s: failed to get jpeg parameters", __func__);
860                pthread_mutex_unlock(&mMutex);
861                return rc;
862            }
863        }
864
865        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
866        if (rc != OK) {
867            ALOGE("%s: fence wait failed %d", __func__, rc);
868            pthread_mutex_unlock(&mMutex);
869            return rc;
870        }
871        streamTypeMask |= channel->getStreamTypeMask();
872    }
873
874    rc = setFrameParameters(request->frame_number, request->settings, streamTypeMask);
875    if (rc < 0) {
876        ALOGE("%s: fail to set frame parameters", __func__);
877        pthread_mutex_unlock(&mMutex);
878        return rc;
879    }
880
881    /* Update pending request list and pending buffers map */
882    PendingRequestInfo pendingRequest;
883    pendingRequest.frame_number = frameNumber;
884    pendingRequest.num_buffers = request->num_output_buffers;
885    pendingRequest.request_id = request_id;
886    pendingRequest.blob_request = blob_request;
887
888    for (size_t i = 0; i < request->num_output_buffers; i++) {
889        RequestedBufferInfo requestedBuf;
890        requestedBuf.stream = request->output_buffers[i].stream;
891        requestedBuf.buffer = NULL;
892        pendingRequest.buffers.push_back(requestedBuf);
893
894        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
895    }
896    mPendingRequestsList.push_back(pendingRequest);
897
898    // Notify metadata channel we receive a request
899    mMetadataChannel->request(NULL, frameNumber);
900
901    // Call request on other streams
902    for (size_t i = 0; i < request->num_output_buffers; i++) {
903        const camera3_stream_buffer_t& output = request->output_buffers[i];
904        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
905        mm_camera_buf_def_t *pInputBuffer = NULL;
906
907        if (channel == NULL) {
908            ALOGE("%s: invalid channel pointer for stream", __func__);
909            continue;
910        }
911
912        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
913            QCamera3RegularChannel* inputChannel = NULL;
914            if(request->input_buffer != NULL){
915
916                //Try to get the internal format
917                inputChannel = (QCamera3RegularChannel*)
918                    request->input_buffer->stream->priv;
919                if(inputChannel == NULL ){
920                    ALOGE("%s: failed to get input channel handle", __func__);
921                } else {
922                    pInputBuffer =
923                        inputChannel->getInternalFormatBuffer(
924                                request->input_buffer->buffer);
925                    ALOGD("%s: Input buffer dump",__func__);
926                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
927                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
928                    ALOGD("frame len:%d", pInputBuffer->frame_len);
929                }
930            }
931            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
932                            pInputBuffer,(QCamera3Channel*)inputChannel);
933        } else {
934            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
935                __LINE__, output.buffer, frameNumber);
936            rc = channel->request(output.buffer, frameNumber);
937        }
938        if (rc < 0)
939            ALOGE("%s: request failed", __func__);
940    }
941
942    mFirstRequest = false;
943
944    //Block on conditional variable
945    mPendingRequest = 1;
946    while (mPendingRequest == 1) {
947        pthread_cond_wait(&mRequestCond, &mMutex);
948    }
949
950    pthread_mutex_unlock(&mMutex);
951    return rc;
952}
953
954/*===========================================================================
955 * FUNCTION   : getMetadataVendorTagOps
956 *
957 * DESCRIPTION:
958 *
959 * PARAMETERS :
960 *
961 *
962 * RETURN     :
963 *==========================================================================*/
964void QCamera3HardwareInterface::getMetadataVendorTagOps(
965                    vendor_tag_query_ops_t* /*ops*/)
966{
967    /* Enable locks when we eventually add Vendor Tags */
968    /*
969    pthread_mutex_lock(&mMutex);
970
971    pthread_mutex_unlock(&mMutex);
972    */
973    return;
974}
975
976/*===========================================================================
977 * FUNCTION   : dump
978 *
979 * DESCRIPTION:
980 *
981 * PARAMETERS :
982 *
983 *
984 * RETURN     :
985 *==========================================================================*/
986void QCamera3HardwareInterface::dump(int /*fd*/)
987{
988    /*Enable lock when we implement this function*/
989    /*
990    pthread_mutex_lock(&mMutex);
991
992    pthread_mutex_unlock(&mMutex);
993    */
994    return;
995}
996
997
998/*===========================================================================
999 * FUNCTION   : captureResultCb
1000 *
1001 * DESCRIPTION: Callback handler for all capture result
1002 *              (streams, as well as metadata)
1003 *
1004 * PARAMETERS :
1005 *   @metadata : metadata information
1006 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1007 *               NULL if metadata.
1008 *
1009 * RETURN     : NONE
1010 *==========================================================================*/
1011void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1012                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1013{
1014    pthread_mutex_lock(&mMutex);
1015
1016    if (metadata_buf) {
1017        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1018        int32_t frame_number_valid = *(int32_t *)
1019            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1020        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1021            CAM_INTF_META_PENDING_REQUESTS, metadata);
1022        uint32_t frame_number = *(uint32_t *)
1023            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1024        const struct timeval *tv = (const struct timeval *)
1025            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1026        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1027            tv->tv_usec * NSEC_PER_USEC;
1028
1029        if (!frame_number_valid) {
1030            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1031            mMetadataChannel->bufDone(metadata_buf);
1032            goto done_metadata;
1033        }
1034        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1035                frame_number, capture_time);
1036
1037        // Go through the pending requests info and send shutter/results to frameworks
1038        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1039                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1040            camera3_capture_result_t result;
1041            camera3_notify_msg_t notify_msg;
1042            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1043
1044            // Flush out all entries with less or equal frame numbers.
1045
1046            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1047            //Right now it's the same as metadata timestamp
1048
1049            //TODO: When there is metadata drop, how do we derive the timestamp of
1050            //dropped frames? For now, we fake the dropped timestamp by substracting
1051            //from the reported timestamp
1052            nsecs_t current_capture_time = capture_time -
1053                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1054
1055            // Send shutter notify to frameworks
1056            notify_msg.type = CAMERA3_MSG_SHUTTER;
1057            notify_msg.message.shutter.frame_number = i->frame_number;
1058            notify_msg.message.shutter.timestamp = current_capture_time;
1059            mCallbackOps->notify(mCallbackOps, &notify_msg);
1060            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1061                    i->frame_number, capture_time);
1062
1063            // Send empty metadata with already filled buffers for dropped metadata
1064            // and send valid metadata with already filled buffers for current metadata
1065            if (i->frame_number < frame_number) {
1066                CameraMetadata dummyMetadata;
1067                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1068                        &current_capture_time, 1);
1069                dummyMetadata.update(ANDROID_REQUEST_ID,
1070                        &(i->request_id), 1);
1071                result.result = dummyMetadata.release();
1072            } else {
1073                result.result = translateCbMetadataToResultMetadata(metadata,
1074                        current_capture_time, i->request_id);
1075                if (i->blob_request && needReprocess()) {
1076                   //If it is a blob request then send the metadata to the picture channel
1077                   mPictureChannel->queueMetadata(metadata_buf);
1078
1079                } else {
1080                   // Return metadata buffer
1081                   mMetadataChannel->bufDone(metadata_buf);
1082                   free(metadata_buf);
1083                }
1084            }
1085            if (!result.result) {
1086                ALOGE("%s: metadata is NULL", __func__);
1087            }
1088            result.frame_number = i->frame_number;
1089            result.num_output_buffers = 0;
1090            result.output_buffers = NULL;
1091            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1092                    j != i->buffers.end(); j++) {
1093                if (j->buffer) {
1094                    result.num_output_buffers++;
1095                }
1096            }
1097
1098            if (result.num_output_buffers > 0) {
1099                camera3_stream_buffer_t *result_buffers =
1100                    new camera3_stream_buffer_t[result.num_output_buffers];
1101                if (!result_buffers) {
1102                    ALOGE("%s: Fatal error: out of memory", __func__);
1103                }
1104                size_t result_buffers_idx = 0;
1105                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1106                        j != i->buffers.end(); j++) {
1107                    if (j->buffer) {
1108                        result_buffers[result_buffers_idx++] = *(j->buffer);
1109                        free(j->buffer);
1110                        j->buffer = NULL;
1111                        mPendingBuffersMap.editValueFor(j->stream)--;
1112                    }
1113                }
1114                result.output_buffers = result_buffers;
1115
1116                mCallbackOps->process_capture_result(mCallbackOps, &result);
1117                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1118                        __func__, result.frame_number, current_capture_time);
1119                free_camera_metadata((camera_metadata_t *)result.result);
1120                delete[] result_buffers;
1121            } else {
1122                mCallbackOps->process_capture_result(mCallbackOps, &result);
1123                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1124                        __func__, result.frame_number, current_capture_time);
1125                free_camera_metadata((camera_metadata_t *)result.result);
1126            }
1127            // erase the element from the list
1128            i = mPendingRequestsList.erase(i);
1129        }
1130
1131
1132done_metadata:
1133        bool max_buffers_dequeued = false;
1134        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1135            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1136            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1137            if (queued_buffers == stream->max_buffers) {
1138                max_buffers_dequeued = true;
1139                break;
1140            }
1141        }
1142        if (!max_buffers_dequeued && !pending_requests) {
1143            // Unblock process_capture_request
1144            mPendingRequest = 0;
1145            pthread_cond_signal(&mRequestCond);
1146        }
1147    } else {
1148        // If the frame number doesn't exist in the pending request list,
1149        // directly send the buffer to the frameworks, and update pending buffers map
1150        // Otherwise, book-keep the buffer.
1151        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1152        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1153            i++;
1154        }
1155        if (i == mPendingRequestsList.end()) {
1156            // Verify all pending requests frame_numbers are greater
1157            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1158                    j != mPendingRequestsList.end(); j++) {
1159                if (j->frame_number < frame_number) {
1160                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1161                            __func__, j->frame_number, frame_number);
1162                }
1163            }
1164            camera3_capture_result_t result;
1165            result.result = NULL;
1166            result.frame_number = frame_number;
1167            result.num_output_buffers = 1;
1168            result.output_buffers = buffer;
1169            ALOGV("%s: result frame_number = %d, buffer = %p",
1170                    __func__, frame_number, buffer);
1171            mPendingBuffersMap.editValueFor(buffer->stream)--;
1172            mCallbackOps->process_capture_result(mCallbackOps, &result);
1173        } else {
1174            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1175                    j != i->buffers.end(); j++) {
1176                if (j->stream == buffer->stream) {
1177                    if (j->buffer != NULL) {
1178                        ALOGE("%s: Error: buffer is already set", __func__);
1179                    } else {
1180                        j->buffer = (camera3_stream_buffer_t *)malloc(
1181                                sizeof(camera3_stream_buffer_t));
1182                        *(j->buffer) = *buffer;
1183                        ALOGV("%s: cache buffer %p at result frame_number %d",
1184                                __func__, buffer, frame_number);
1185                    }
1186                }
1187            }
1188        }
1189    }
1190    pthread_mutex_unlock(&mMutex);
1191    return;
1192}
1193
1194/*===========================================================================
1195 * FUNCTION   : translateCbMetadataToResultMetadata
1196 *
1197 * DESCRIPTION:
1198 *
1199 * PARAMETERS :
1200 *   @metadata : metadata information from callback
1201 *
1202 * RETURN     : camera_metadata_t*
1203 *              metadata in a format specified by fwk
1204 *==========================================================================*/
1205camera_metadata_t*
1206QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1207                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1208                                 int32_t request_id)
1209{
1210    CameraMetadata camMetadata;
1211    camera_metadata_t* resultMetadata;
1212
1213    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1214    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1215
1216    /*CAM_INTF_META_HISTOGRAM - TODO*/
1217    /*cam_hist_stats_t  *histogram =
1218      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1219      metadata);*/
1220
1221    /*face detection*/
1222    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1223        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1224    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1225    int32_t faceIds[numFaces];
1226    uint8_t faceScores[numFaces];
1227    int32_t faceRectangles[numFaces * 4];
1228    int32_t faceLandmarks[numFaces * 6];
1229    int j = 0, k = 0;
1230    for (int i = 0; i < numFaces; i++) {
1231        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1232        faceScores[i] = faceDetectionInfo->faces[i].score;
1233        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1234                faceRectangles+j, -1);
1235        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1236        j+= 4;
1237        k+= 6;
1238    }
1239    if (numFaces > 0) {
1240        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1241        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1242        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1243            faceRectangles, numFaces*4);
1244        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1245            faceLandmarks, numFaces*6);
1246    }
1247
1248    uint8_t  *color_correct_mode =
1249        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1250    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1251
1252    int32_t  *ae_precapture_id =
1253        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1254    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1255
1256    /*aec regions*/
1257    cam_area_t  *hAeRegions =
1258        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1259    int32_t aeRegions[5];
1260    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1261    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1262    if(mIsZslMode) {
1263        uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
1264        camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
1265    } else {
1266        uint8_t *ae_state =
1267            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1268        camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1269    }
1270    uint8_t  *focusMode =
1271        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1272    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1273
1274    /*af regions*/
1275    cam_area_t  *hAfRegions =
1276        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1277    int32_t afRegions[5];
1278    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1279    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1280
1281    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1282    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1283
1284    int32_t  *afTriggerId =
1285        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1286    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1287
1288    uint8_t  *whiteBalance =
1289        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1290    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1291
1292    /*awb regions*/
1293    cam_area_t  *hAwbRegions =
1294        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1295    int32_t awbRegions[5];
1296    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1297    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1298
1299    uint8_t  *whiteBalanceState =
1300        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1301    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1302
1303    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1304    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1305
1306    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1307    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1308
1309    uint8_t  *flashPower =
1310        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1311    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1312
1313    int64_t  *flashFiringTime =
1314        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1315    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1316
1317    /*int32_t  *ledMode =
1318      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1319      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1320
1321    uint8_t  *flashState =
1322        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1323    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1324
1325    uint8_t  *hotPixelMode =
1326        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1327    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1328
1329    float  *lensAperture =
1330        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1331    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1332
1333    float  *filterDensity =
1334        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1335    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1336
1337    float  *focalLength =
1338        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1339    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1340
1341    float  *focusDistance =
1342        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1343    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1344
1345    float  *focusRange =
1346        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1347    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1348
1349    uint8_t  *opticalStab =
1350        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1351    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1352
1353    /*int32_t  *focusState =
1354      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1355      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1356
1357    uint8_t  *noiseRedMode =
1358        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1359    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1360
1361    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1362
1363    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1364        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1365    int32_t scalerCropRegion[4];
1366    scalerCropRegion[0] = hScalerCropRegion->left;
1367    scalerCropRegion[1] = hScalerCropRegion->top;
1368    scalerCropRegion[2] = hScalerCropRegion->width;
1369    scalerCropRegion[3] = hScalerCropRegion->height;
1370    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1371
1372    int64_t  *sensorExpTime =
1373        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1374    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1375
1376    int64_t  *sensorFrameDuration =
1377        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1378    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
1379
1380    int32_t  *sensorSensitivity =
1381        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1382    mMetadataResponse.iso_speed = *sensorSensitivity;
1383    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1384
1385    uint8_t  *shadingMode =
1386        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1387    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1388
1389    uint8_t  *faceDetectMode =
1390        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1391    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
1392
1393    uint8_t  *histogramMode =
1394        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1395    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1396
1397    uint8_t  *sharpnessMapMode =
1398        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1399    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1400            sharpnessMapMode, 1);
1401
1402    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1403    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1404        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1405    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1406            (int32_t*)sharpnessMap->sharpness,
1407            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1408
1409    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1410        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1411    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1412    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1413    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1414                       (float*)lensShadingMap->lens_shading,
1415                       4*map_width*map_height);
1416
1417    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1418        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1419    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1420
1421    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1422        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1423    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1424                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1425
1426    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1427        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1428    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1429                       predColorCorrectionGains->gains, 4);
1430
1431    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1432        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1433    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1434                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1435
1436    uint8_t *blackLevelLock = (uint8_t*)
1437        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1438    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1439
1440    uint8_t *sceneFlicker = (uint8_t*)
1441        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1442    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1443
1444
1445    resultMetadata = camMetadata.release();
1446    return resultMetadata;
1447}
1448
1449/*===========================================================================
1450 * FUNCTION   : convertToRegions
1451 *
1452 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1453 *
1454 * PARAMETERS :
1455 *   @rect   : cam_rect_t struct to convert
1456 *   @region : int32_t destination array
1457 *   @weight : if we are converting from cam_area_t, weight is valid
1458 *             else weight = -1
1459 *
1460 *==========================================================================*/
1461void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1462    region[0] = rect.left;
1463    region[1] = rect.top;
1464    region[2] = rect.left + rect.width;
1465    region[3] = rect.top + rect.height;
1466    if (weight > -1) {
1467        region[4] = weight;
1468    }
1469}
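
/* Illustrative sketch (comment only, not part of the build): assuming the
 * cam_rect_t layout {left, top, width, height}, a 300x400 rect at (100, 200)
 * with weight 1 maps to the [xmin, ymin, xmax, ymax, weight] layout that the
 * ANDROID_CONTROL_*_REGIONS tags expect:
 *
 *   cam_rect_t r = {100, 200, 300, 400};
 *   int32_t region[5];
 *   convertToRegions(r, region, 1);
 *   // region: {100, 200, 400, 600, 1}
 */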
1470
1471/*===========================================================================
1472 * FUNCTION   : convertFromRegions
1473 *
1474 * DESCRIPTION: helper method to convert a framework region entry
1475 *              ([xmin, ymin, xmax, ymax, weight]) into cam_area_t
1476 *
1477 * PARAMETERS :
1478 *   @roi      : cam_area_t destination struct
1479 *   @settings : frame settings from the framework
1480 *   @tag      : metadata tag that holds the region entry
1481 *
1482 *==========================================================================*/
1483void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1484                                                   const camera_metadata_t *settings,
1485                                                   uint32_t tag){
1486    CameraMetadata frame_settings;
1487    frame_settings = settings;
1488    int32_t x_min = frame_settings.find(tag).data.i32[0];
1489    int32_t y_min = frame_settings.find(tag).data.i32[1];
1490    int32_t x_max = frame_settings.find(tag).data.i32[2];
1491    int32_t y_max = frame_settings.find(tag).data.i32[3];
1492    roi->weight = frame_settings.find(tag).data.i32[4];
1493    roi->rect.left = x_min;
1494    roi->rect.top = y_min;
1495    roi->rect.width = x_max - x_min;
1496    roi->rect.height = y_max - y_min;
1497}
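
/* Illustrative sketch (comment only): a request whose hypothetical settings
 * carry an ANDROID_CONTROL_AF_REGIONS entry of {100, 200, 400, 600, 1} is
 * read back into cam_area_t form, i.e. the inverse of convertToRegions():
 *
 *   cam_area_t roi;
 *   convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
 *   // roi.rect = {left=100, top=200, width=300, height=400}, roi.weight = 1
 */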
1498
1499/*===========================================================================
1500 * FUNCTION   : resetIfNeededROI
1501 *
1502 * DESCRIPTION: helper method to clamp the roi to the scaler crop region;
1503 *              returns false when the roi does not overlap the crop region
1504 *
1505 * PARAMETERS :
1506 *   @roi       : cam_area_t struct to resize
1507 *   @scalerCropRegion : cam_crop_region_t region to compare against
1508 *
1509 *
1510 *==========================================================================*/
1511bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1512                                                 const cam_crop_region_t* scalerCropRegion)
1513{
1514    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1515    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1516    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
1517    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
1518    if ((roi_x_max < scalerCropRegion->left) ||
1519        (roi_y_max < scalerCropRegion->top)  ||
1520        (roi->rect.left > crop_x_max) ||
1521        (roi->rect.top > crop_y_max)){
1522        return false;
1523    }
1524    if (roi->rect.left < scalerCropRegion->left) {
1525        roi->rect.left = scalerCropRegion->left;
1526    }
1527    if (roi->rect.top < scalerCropRegion->top) {
1528        roi->rect.top = scalerCropRegion->top;
1529    }
1530    if (roi_x_max > crop_x_max) {
1531        roi_x_max = crop_x_max;
1532    }
1533    if (roi_y_max > crop_y_max) {
1534        roi_y_max = crop_y_max;
1535    }
1536    roi->rect.width = roi_x_max - roi->rect.left;
1537    roi->rect.height = roi_y_max - roi->rect.top;
1538    return true;
1539}
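
/* Illustrative sketch (comment only), assuming {left, top, width, height}
 * brace order for both structs: with a 1920x1080 crop region at the origin,
 * an AF roi that spills past the left and bottom edges is clamped, while a
 * roi with no overlap at all makes the function return false so it can be
 * dropped instead of being sent to the backend:
 *
 *   cam_crop_region_t crop = {0, 0, 1920, 1080};
 *   cam_area_t roi = { {-100, 100, 400, 1200}, 1 };
 *   if (resetIfNeededROI(&roi, &crop)) {
 *       // roi.rect is now {left=0, top=100, width=300, height=980}
 *   }
 */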
1540
1541/*===========================================================================
1542 * FUNCTION   : convertLandmarks
1543 *
1544 * DESCRIPTION: helper method to extract the landmarks from face detection info
1545 *
1546 * PARAMETERS :
1547 *   @face   : cam_face_detection_info_t struct holding the detected face
1548 *   @landmarks : int32_t destination array
1549 *
1550 *
1551 *==========================================================================*/
1552void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1553{
1554    landmarks[0] = face.left_eye_center.x;
1555    landmarks[1] = face.left_eye_center.y;
1556    landmarks[2] = face.right_eye_center.x;
1557    landmarks[3] = face.right_eye_center.y;
1558    landmarks[4] = face.mouth_center.x;
1559    landmarks[5] = face.mouth_center.y;
1560}
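
/* Note: landmarks[] is packed in the ANDROID_STATISTICS_FACE_LANDMARKS order,
 * i.e. [left_eye.x, left_eye.y, right_eye.x, right_eye.y, mouth.x, mouth.y]. */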
1561
1562#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1563/*===========================================================================
1564 * FUNCTION   : initCapabilities
1565 *
1566 * DESCRIPTION: initialize camera capabilities in static data struct
1567 *
1568 * PARAMETERS :
1569 *   @cameraId  : camera Id
1570 *
1571 * RETURN     : int32_t type of status
1572 *              NO_ERROR  -- success
1573 *              non-zero failure code
1574 *==========================================================================*/
1575int QCamera3HardwareInterface::initCapabilities(int cameraId)
1576{
1577    int rc = 0;
1578    mm_camera_vtbl_t *cameraHandle = NULL;
1579    QCamera3HeapMemory *capabilityHeap = NULL;
1580
1581    cameraHandle = camera_open(cameraId);
1582    if (!cameraHandle) {
1583        ALOGE("%s: camera_open failed", __func__);
1584        rc = -1;
1585        goto open_failed;
1586    }
1587
1588    capabilityHeap = new QCamera3HeapMemory();
1589    if (capabilityHeap == NULL) {
1590        ALOGE("%s: creation of capabilityHeap failed", __func__);
1591        goto heap_creation_failed;
1592    }
1593    /* Allocate memory for capability buffer */
1594    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1595    if(rc != OK) {
1596        ALOGE("%s: No memory for capability", __func__);
1597        goto allocate_failed;
1598    }
1599
1600    /* Map memory for capability buffer */
1601    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1602    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1603                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1604                                capabilityHeap->getFd(0),
1605                                sizeof(cam_capability_t));
1606    if(rc < 0) {
1607        ALOGE("%s: failed to map capability buffer", __func__);
1608        goto map_failed;
1609    }
1610
1611    /* Query Capability */
1612    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1613    if(rc < 0) {
1614        ALOGE("%s: failed to query capability",__func__);
1615        goto query_failed;
1616    }
1617    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1618    if (!gCamCapability[cameraId]) {
1619        ALOGE("%s: out of memory", __func__);
1620        goto query_failed;
1621    }
1622    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1623                                        sizeof(cam_capability_t));
1624    rc = 0;
1625
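    /* The labels below unwind in reverse order of acquisition, so a failure
     * at any step releases only the resources set up before it. */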
1626query_failed:
1627    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1628                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1629map_failed:
1630    capabilityHeap->deallocate();
1631allocate_failed:
1632    delete capabilityHeap;
1633heap_creation_failed:
1634    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1635    cameraHandle = NULL;
1636open_failed:
1637    return rc;
1638}
1639
1640/*===========================================================================
1641 * FUNCTION   : initParameters
1642 *
1643 * DESCRIPTION: initialize camera parameters
1644 *
1645 * PARAMETERS :
1646 *
1647 * RETURN     : int32_t type of status
1648 *              NO_ERROR  -- success
1649 *              non-zero failure code
1650 *==========================================================================*/
1651int QCamera3HardwareInterface::initParameters()
1652{
1653    int rc = 0;
1654
1655    //Allocate Set Param Buffer
1656    mParamHeap = new QCamera3HeapMemory();
1657    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1658    if(rc != OK) {
1659        rc = NO_MEMORY;
1660        ALOGE("Failed to allocate SETPARM Heap memory");
1661        delete mParamHeap;
1662        mParamHeap = NULL;
1663        return rc;
1664    }
1665
1666    //Map memory for parameters buffer
1667    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1668            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1669            mParamHeap->getFd(0),
1670            sizeof(parm_buffer_t));
1671    if(rc < 0) {
1672        ALOGE("%s:failed to map SETPARM buffer",__func__);
1673        rc = FAILED_TRANSACTION;
1674        mParamHeap->deallocate();
1675        delete mParamHeap;
1676        mParamHeap = NULL;
1677        return rc;
1678    }
1679
1680    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1681    return rc;
1682}
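
/* Note: mParameters points directly into the heap buffer that was just mapped
 * to the backend, so entries written via AddSetParmEntryToBatch() land in the
 * buffer shared with the backend and no extra copy is needed at set_parms()
 * time. */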
1683
1684/*===========================================================================
1685 * FUNCTION   : deinitParameters
1686 *
1687 * DESCRIPTION: de-initialize camera parameters
1688 *
1689 * PARAMETERS :
1690 *
1691 * RETURN     : NONE
1692 *==========================================================================*/
1693void QCamera3HardwareInterface::deinitParameters()
1694{
1695    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1696            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1697
1698    mParamHeap->deallocate();
1699    delete mParamHeap;
1700    mParamHeap = NULL;
1701
1702    mParameters = NULL;
1703}
1704
1705/*===========================================================================
1706 * FUNCTION   : calcMaxJpegSize
1707 *
1708 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1709 *
1710 * PARAMETERS :
1711 *
1712 * RETURN     : max_jpeg_size
1713 *==========================================================================*/
1714int QCamera3HardwareInterface::calcMaxJpegSize()
1715{
1716    int32_t max_jpeg_size = 0;
1717    int temp_width, temp_height;
1718    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1719        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1720        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1721        if (temp_width * temp_height > max_jpeg_size ) {
1722            max_jpeg_size = temp_width * temp_height;
1723        }
1724    }
1725    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1726    return max_jpeg_size;
1727}
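
/* Worked example (comment only): for a hypothetical sensor whose largest
 * picture size is 4208x3120, the estimate is
 *   4208 * 3120 * 3/2 + sizeof(camera3_jpeg_blob_t) ~= 19.7 MB,
 * i.e. the YUV420 footprint of the largest frame plus the transport header
 * appended at the end of every JPEG blob. */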
1728
1729/*===========================================================================
1730 * FUNCTION   : initStaticMetadata
1731 *
1732 * DESCRIPTION: initialize the static metadata
1733 *
1734 * PARAMETERS :
1735 *   @cameraId  : camera Id
1736 *
1737 * RETURN     : int32_t type of status
1738 *              0  -- success
1739 *              non-zero failure code
1740 *==========================================================================*/
1741int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1742{
1743    int rc = 0;
1744    CameraMetadata staticInfo;
1745
1746    /* android.info: hardware level */
1747    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
1748    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1749        &supportedHardwareLevel, 1);
1750
1751    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1752    /*HAL 3 only*/
1753    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1754                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1755
1756    /*hard coded for now but this should come from sensor*/
1757    float min_focus_distance;
1758    if(facingBack){
1759        min_focus_distance = 10;
1760    } else {
1761        min_focus_distance = 0;
1762    }
1763    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1764                    &min_focus_distance, 1);
1765
1766    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1767                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1768
1769    /*should be using focal lengths but sensor doesn't provide that info now*/
1770    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1771                      &gCamCapability[cameraId]->focal_length,
1772                      1);
1773
1774    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1775                      gCamCapability[cameraId]->apertures,
1776                      gCamCapability[cameraId]->apertures_count);
1777
1778    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1779                gCamCapability[cameraId]->filter_densities,
1780                gCamCapability[cameraId]->filter_densities_count);
1781
1782
1783    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1784                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1785                      gCamCapability[cameraId]->optical_stab_modes_count);
1786
1787    staticInfo.update(ANDROID_LENS_POSITION,
1788                      gCamCapability[cameraId]->lens_position,
1789                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1790
1791    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1792                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1793    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1794                      lens_shading_map_size,
1795                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1796
1797    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1798                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1799    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1800            geo_correction_map_size,
1801            sizeof(geo_correction_map_size)/sizeof(int32_t));
1802
1803    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1804                       gCamCapability[cameraId]->geo_correction_map,
1805                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1806
1807    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1808            gCamCapability[cameraId]->sensor_physical_size, 2);
1809
1810    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1811            gCamCapability[cameraId]->exposure_time_range, 2);
1812
1813    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1814            &gCamCapability[cameraId]->max_frame_duration, 1);
1815
1816
1817    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1818                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1819
1820    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1821                                               gCamCapability[cameraId]->pixel_array_size.height};
1822    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1823                      pixel_array_size, 2);
1824
1825    int32_t active_array_size[] = {0, 0,
1826                                                gCamCapability[cameraId]->active_array_size.width,
1827                                                gCamCapability[cameraId]->active_array_size.height};
1828    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1829                      active_array_size, 4);
1830
1831    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1832            &gCamCapability[cameraId]->white_level, 1);
1833
1834    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1835            gCamCapability[cameraId]->black_level_pattern, 4);
1836
1837    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1838                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1839
1840    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1841                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1842
1843    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1844                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1845    /*hardcode 0 for now*/
1846    int32_t max_face_count = 0;
1847    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1848                      &max_face_count, 1);
1849
1850    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1851                      &gCamCapability[cameraId]->histogram_size, 1);
1852
1853    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1854            &gCamCapability[cameraId]->max_histogram_count, 1);
1855
1856    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1857                                                gCamCapability[cameraId]->sharpness_map_size.height};
1858
1859    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1860            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1861
1862    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1863            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1864
1865
1866    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1867                      &gCamCapability[cameraId]->raw_min_duration,
1868                       1);
1869
1870    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
1871                                                HAL_PIXEL_FORMAT_BLOB};
1872    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
1873    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1874                      scalar_formats,
1875                      scalar_formats_count);
1876
1877    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1878    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1879              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1880              available_processed_sizes);
1881    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1882                available_processed_sizes,
1883                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
1884
1885    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1886                      &gCamCapability[cameraId]->jpeg_min_duration[0],
1887                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
1888
1889    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1890    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1891                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1892                 available_fps_ranges);
1893    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1894            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1895
1896    camera_metadata_rational exposureCompensationStep = {
1897            gCamCapability[cameraId]->exp_compensation_step.numerator,
1898            gCamCapability[cameraId]->exp_compensation_step.denominator};
1899    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1900                      &exposureCompensationStep, 1);
1901
1902    /*TO DO*/
1903    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1904    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1905                      availableVstabModes, sizeof(availableVstabModes));
1906
1907    /*HAL 1 and HAL 3 common*/
1908    float maxZoom = 4;
1909    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1910            &maxZoom, 1);
1911
1912    int32_t max3aRegions = 1;
1913    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1914            &max3aRegions, 1);
1915
1916    uint8_t availableFaceDetectModes[] = {
1917            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1918    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1919                      availableFaceDetectModes,
1920                      sizeof(availableFaceDetectModes));
1921
1922    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1923                                       gCamCapability[cameraId]->raw_dim.height};
1924    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1925                      raw_size,
1926                      sizeof(raw_size)/sizeof(int32_t));
1927
1928    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1929                                                        gCamCapability[cameraId]->exposure_compensation_max};
1930    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1931            exposureCompensationRange,
1932            sizeof(exposureCompensationRange)/sizeof(int32_t));
1933
1934    uint8_t lensFacing = (facingBack) ?
1935            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1936    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1937
1938    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1939                available_processed_sizes,
1940                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1941
1942    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1943                      available_thumbnail_sizes,
1944                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1945
1946    int32_t max_jpeg_size = 0;
1947    int temp_width, temp_height;
1948    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1949        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1950        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1951        if (temp_width * temp_height > max_jpeg_size ) {
1952            max_jpeg_size = temp_width * temp_height;
1953        }
1954    }
1955    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1956    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1957                      &max_jpeg_size, 1);
1958
1959    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1960    int32_t size = 0;
1961    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1962        int val = lookupFwkName(EFFECT_MODES_MAP,
1963                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1964                                   gCamCapability[cameraId]->supported_effects[i]);
1965        if (val != NAME_NOT_FOUND) {
1966            avail_effects[size] = (uint8_t)val;
1967            size++;
1968        }
1969    }
1970    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1971                      avail_effects,
1972                      size);
1973
1974    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1975    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1976    int32_t supported_scene_modes_cnt = 0;
1977    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1978        int val = lookupFwkName(SCENE_MODES_MAP,
1979                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1980                                gCamCapability[cameraId]->supported_scene_modes[i]);
1981        if (val != NAME_NOT_FOUND) {
1982            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1983            supported_indexes[supported_scene_modes_cnt] = i;
1984            supported_scene_modes_cnt++;
1985        }
1986    }
1987
1988    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1989                      avail_scene_modes,
1990                      supported_scene_modes_cnt);
1991
1992    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1993    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1994                      supported_scene_modes_cnt,
1995                      scene_mode_overrides,
1996                      supported_indexes,
1997                      cameraId);
1998    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
1999                      scene_mode_overrides,
2000                      supported_scene_modes_cnt*3);
2001
2002    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2003    size = 0;
2004    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2005        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2006                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2007                                 gCamCapability[cameraId]->supported_antibandings[i]);
2008        if (val != NAME_NOT_FOUND) {
2009            avail_antibanding_modes[size] = (uint8_t)val;
2010            size++;
2011        }
2012
2013    }
2014    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2015                      avail_antibanding_modes,
2016                      size);
2017
2018    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2019    size = 0;
2020    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2021        int val = lookupFwkName(FOCUS_MODES_MAP,
2022                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2023                                gCamCapability[cameraId]->supported_focus_modes[i]);
2024        if (val != NAME_NOT_FOUND) {
2025            avail_af_modes[size] = (uint8_t)val;
2026            size++;
2027        }
2028    }
2029    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2030                      avail_af_modes,
2031                      size);
2032
2033    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2034    size = 0;
2035    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2036        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2037                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2038                                    gCamCapability[cameraId]->supported_white_balances[i]);
2039        if (val != NAME_NOT_FOUND) {
2040            avail_awb_modes[size] = (uint8_t)val;
2041            size++;
2042        }
2043    }
2044    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2045                      avail_awb_modes,
2046                      size);
2047
2048    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2049    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2050      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2051
2052    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2053            available_flash_levels,
2054            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2055
2056
2057    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2058    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2059            &flashAvailable, 1);
2060
2061    uint8_t avail_ae_modes[5];
2062    size = 0;
2063    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2064        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2065        size++;
2066    }
2067    if (flashAvailable) {
2068        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2069        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2070        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2071    }
2072    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2073                      avail_ae_modes,
2074                      size);
2075
2076    int32_t sensitivity_range[2];
2077    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2078    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2079    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2080                      sensitivity_range,
2081                      sizeof(sensitivity_range) / sizeof(int32_t));
2082
2083    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2084                      &gCamCapability[cameraId]->max_analog_sensitivity,
2085                      1);
2086
2087    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2088                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2089                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2090
2091    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2092    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2093                      &sensor_orientation,
2094                      1);
2095
2096    int32_t max_output_streams[3] = {1, 3, 1};
2097    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2098                      max_output_streams,
2099                      3);
2100
2101    gStaticMetadata[cameraId] = staticInfo.release();
2102    return rc;
2103}
2104
2105/*===========================================================================
2106 * FUNCTION   : makeTable
2107 *
2108 * DESCRIPTION: make a table of sizes
2109 *
2110 * PARAMETERS :
2111 *   @dimTable / @size : cam_dimension_t table and its entry count
2112 *   @sizeTable        : int32_t output array of interleaved width,height pairs
2113 *==========================================================================*/
2114void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2115                                          int32_t* sizeTable)
2116{
2117    int j = 0;
2118    for (int i = 0; i < size; i++) {
2119        sizeTable[j] = dimTable[i].width;
2120        sizeTable[j+1] = dimTable[i].height;
2121        j+=2;
2122    }
2123}
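
/* Illustrative sketch (comment only): a hypothetical two-entry dimension
 * table {4208x3120, 1920x1080} is flattened into the interleaved form used
 * by tags such as ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES:
 *
 *   int32_t sizes[4];
 *   makeTable(dimTable, 2, sizes);   // sizes: {4208, 3120, 1920, 1080}
 */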
2124
2125/*===========================================================================
2126 * FUNCTION   : makeFPSTable
2127 *
2128 * DESCRIPTION: make a table of fps ranges
2129 *
2130 * PARAMETERS :
2131 *   @fpsTable / @size / @fpsRangesTable : fps range table, entry count, int32_t output array
2132 *==========================================================================*/
2133void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2134                                          int32_t* fpsRangesTable)
2135{
2136    int j = 0;
2137    for (int i = 0; i < size; i++) {
2138        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2139        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2140        j+=2;
2141    }
2142}
2143
2144/*===========================================================================
2145 * FUNCTION   : makeOverridesList
2146 *
2147 * DESCRIPTION: make a list of scene mode overrides
2148 *
2149 * PARAMETERS :
2150 *   @overridesTable / @size : backend scene mode override table and count
2151 *   @overridesList / @supported_indexes / @camera_id : packed output, fwk-supported indexes, camera id
2152 *==========================================================================*/
2153void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2154                                                  uint8_t size, uint8_t* overridesList,
2155                                                  uint8_t* supported_indexes,
2156                                                  int camera_id)
2157{
2158    /*daemon will give a list of overrides for all scene modes.
2159      However we should send the fwk only the overrides for the scene modes
2160      supported by the framework*/
2161    int j = 0, index = 0, supt = 0;
2162    uint8_t focus_override;
2163    for (int i = 0; i < size; i++) {
2164        supt = 0;
2165        index = supported_indexes[i];
2166        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
2167        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2168                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2169                                                    overridesTable[index].awb_mode);
2170        focus_override = (uint8_t)overridesTable[index].af_mode;
2171        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2172           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2173              supt = 1;
2174              break;
2175           }
2176        }
2177        if (supt) {
2178           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2179                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2180                                              focus_override);
2181        } else {
2182           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2183        }
2184        j+=3;
2185    }
2186}
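
/* Note: the override list is packed as three bytes per framework-visible
 * scene mode ([AE mode, AWB mode, AF mode]), so the
 * ANDROID_CONTROL_SCENE_MODE_OVERRIDES entry published in initStaticMetadata()
 * holds 3 * supported_scene_modes_cnt bytes, and an AF override the sensor
 * does not support is downgraded to ANDROID_CONTROL_AF_MODE_OFF. */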
2187
2188/*===========================================================================
2189 * FUNCTION   : getScalarFormat
2190 *
2191 * DESCRIPTION: convert the backend format to one recognized by the framework
2192 *
2193 * PARAMETERS : @format : the format from the backend
2194 *
2195 * RETURN     : format recognized by framework
2196 *
2197 *==========================================================================*/
2198int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2199{
2200    int32_t halPixelFormat;
2201
2202    switch (format) {
2203    case CAM_FORMAT_YUV_420_NV12:
2204        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2205        break;
2206    case CAM_FORMAT_YUV_420_NV21:
2207        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2208        break;
2209    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2210        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2211        break;
2212    case CAM_FORMAT_YUV_420_YV12:
2213        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2214        break;
2215    case CAM_FORMAT_YUV_422_NV16:
2216    case CAM_FORMAT_YUV_422_NV61:
2217    default:
2218        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2219        break;
2220    }
2221    return halPixelFormat;
2222}
2223
2224/*===========================================================================
2225 * FUNCTION   : getSensorSensitivity
2226 *
2227 * DESCRIPTION: convert iso_mode to an integer value
2228 *
2229 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2230 *
2231 * RETURN     : sensitivity supported by sensor
2232 *
2233 *==========================================================================*/
2234int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2235{
2236    int32_t sensitivity;
2237
2238    switch (iso_mode) {
2239    case CAM_ISO_MODE_100:
2240        sensitivity = 100;
2241        break;
2242    case CAM_ISO_MODE_200:
2243        sensitivity = 200;
2244        break;
2245    case CAM_ISO_MODE_400:
2246        sensitivity = 400;
2247        break;
2248    case CAM_ISO_MODE_800:
2249        sensitivity = 800;
2250        break;
2251    case CAM_ISO_MODE_1600:
2252        sensitivity = 1600;
2253        break;
2254    default:
2255        sensitivity = -1;
2256        break;
2257    }
2258    return sensitivity;
2259}
2260
2261
2262/*===========================================================================
2263 * FUNCTION   : AddSetParmEntryToBatch
2264 *
2265 * DESCRIPTION: add set parameter entry into batch
2266 *
2267 * PARAMETERS :
2268 *   @p_table     : ptr to parameter buffer
2269 *   @paramType   : parameter type
2270 *   @paramLength : length of parameter value
2271 *   @paramValue  : ptr to parameter value
2272 *
2273 * RETURN     : int32_t type of status
2274 *              NO_ERROR  -- success
2275 *              non-zero failure code
2276 *==========================================================================*/
2277int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2278                                                          cam_intf_parm_type_t paramType,
2279                                                          uint32_t paramLength,
2280                                                          void *paramValue)
2281{
2282    int position = paramType;
2283    int current, next;
2284
2285    /*************************************************************************
2286    *                 Code to take care of linking next flags                *
2287    *************************************************************************/
2288    current = GET_FIRST_PARAM_ID(p_table);
2289    if (position == current){
2290        //DO NOTHING
2291    } else if (position < current){
2292        SET_NEXT_PARAM_ID(position, p_table, current);
2293        SET_FIRST_PARAM_ID(p_table, position);
2294    } else {
2295        /* Search for the position in the linked list where we need to slot in*/
2296        while (position > GET_NEXT_PARAM_ID(current, p_table))
2297            current = GET_NEXT_PARAM_ID(current, p_table);
2298
2299        /*If node already exists no need to alter linking*/
2300        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2301            next = GET_NEXT_PARAM_ID(current, p_table);
2302            SET_NEXT_PARAM_ID(current, p_table, position);
2303            SET_NEXT_PARAM_ID(position, p_table, next);
2304        }
2305    }
2306
2307    /*************************************************************************
2308    *                   Copy contents into entry                             *
2309    *************************************************************************/
2310
2311    if (paramLength > sizeof(parm_type_t)) {
2312        ALOGE("%s:Size of input larger than max entry size",__func__);
2313        return BAD_VALUE;
2314    }
2315    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2316    return NO_ERROR;
2317}
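
/* Illustrative sketch (comment only): parm_buffer_t acts as a sparse table of
 * entries chained by "next" ids, so a per-frame batch is built by repeated
 * calls and then pushed to the backend in one set_parms() call, e.g.:
 *
 *   int32_t hal_version = CAM_HAL_V3;
 *   uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
 *   AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
 *                          sizeof(hal_version), &hal_version);
 *   AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
 *                          sizeof(aeLock), &aeLock);
 *   mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
 */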
2318
2319/*===========================================================================
2320 * FUNCTION   : lookupFwkName
2321 *
2322 * DESCRIPTION: In case the enum is not the same in fwk and backend,
2323 *              make sure the parameter is correctly propagated
2324 *
2325 * PARAMETERS  :
2326 *   @arr      : map between the two enums
2327 *   @len      : len of the map
2328 *   @hal_name : name of the hal_parm to map
2329 *
2330 * RETURN     : int8_t type of status
2331 *              fwk_name       -- success
2332 *              NAME_NOT_FOUND -- failure
2333 *==========================================================================*/
2334int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2335                                             int len, int hal_name)
2336{
2337
2338    for (int i = 0; i < len; i++) {
2339        if (arr[i].hal_name == hal_name)
2340            return arr[i].fwk_name;
2341    }
2342
2343    /* Not able to find matching framework type is not necessarily
2344     * an error case. This happens when mm-camera supports more attributes
2345     * than the frameworks do */
2346    ALOGD("%s: Cannot find matching framework type", __func__);
2347    return NAME_NOT_FOUND;
2348}
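
/* Illustrative sketch (comment only): a backend enum with a framework twin
 * resolves to that twin, anything else comes back as NAME_NOT_FOUND and is
 * simply skipped when the available-modes lists are built, e.g.:
 *
 *   int val = lookupFwkName(EFFECT_MODES_MAP,
 *                           sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
 *                           CAM_EFFECT_MODE_SEPIA);
 *   // val == ANDROID_CONTROL_EFFECT_MODE_SEPIA
 */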
2349
2350/*===========================================================================
2351 * FUNCTION   : lookupHalName
2352 *
2353 * DESCRIPTION: In case the enum is not the same in fwk and backend,
2354 *              make sure the parameter is correctly propagated
2355 *
2356 * PARAMETERS  :
2357 *   @arr      : map between the two enums
2358 *   @len      : len of the map
2359 *   @fwk_name : name of the fwk_parm to map
2360 *
2361 * RETURN     : int8_t type of status
2362 *              hal_name       -- success
2363 *              NAME_NOT_FOUND -- failure
2364 *==========================================================================*/
2365int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2366                                             int len, int fwk_name)
2367{
2368    for (int i = 0; i < len; i++) {
2369       if (arr[i].fwk_name == fwk_name)
2370           return arr[i].hal_name;
2371    }
2372    ALOGE("%s: Cannot find matching hal type", __func__);
2373    return NAME_NOT_FOUND;
2374}
2375
2376/*===========================================================================
2377 * FUNCTION   : getCamInfo
2378 *
2379 * DESCRIPTION: query camera capabilities
2380 *
2381 * PARAMETERS :
2382 *   @cameraId  : camera Id
2383 *   @info      : camera info struct to be filled in with camera capabilities
2384 *
2385 * RETURN     : int32_t type of status
2386 *              NO_ERROR  -- success
2387 *              non-zero failure code
2388 *==========================================================================*/
2389int QCamera3HardwareInterface::getCamInfo(int cameraId,
2390                                    struct camera_info *info)
2391{
2392    int rc = 0;
2393
2394    if (NULL == gCamCapability[cameraId]) {
2395        rc = initCapabilities(cameraId);
2396        if (rc < 0) {
2397            //pthread_mutex_unlock(&g_camlock);
2398            return rc;
2399        }
2400    }
2401
2402    if (NULL == gStaticMetadata[cameraId]) {
2403        rc = initStaticMetadata(cameraId);
2404        if (rc < 0) {
2405            return rc;
2406        }
2407    }
2408
2409    switch(gCamCapability[cameraId]->position) {
2410    case CAM_POSITION_BACK:
2411        info->facing = CAMERA_FACING_BACK;
2412        break;
2413
2414    case CAM_POSITION_FRONT:
2415        info->facing = CAMERA_FACING_FRONT;
2416        break;
2417
2418    default:
2419        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2420        rc = -1;
2421        break;
2422    }
2423
2424
2425    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2426    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2427    info->static_camera_characteristics = gStaticMetadata[cameraId];
2428
2429    return rc;
2430}
2431
2432/*===========================================================================
2433 * FUNCTION   : translateCapabilityToMetadata
2434 *
2435 * DESCRIPTION: translate capability info into default request settings
2436 *
2437 * PARAMETERS : @type : type of the request template
2438 *
2439 *
2440 * RETURN     : success: camera_metadata_t*
2441 *              failure: NULL
2442 *
2443 *==========================================================================*/
2444camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2445{
2446    pthread_mutex_lock(&mMutex);
2447
2448    if (mDefaultMetadata[type] != NULL) {
2449        pthread_mutex_unlock(&mMutex);
2450        return mDefaultMetadata[type];
2451    }
2452    //first time we are handling this request
2453    //fill up the metadata structure using the wrapper class
2454    CameraMetadata settings;
2455    //translate from cam_capability_t to camera_metadata_tag_t
2456    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2457    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2458
2459    /*control*/
2460
2461    uint8_t controlIntent = 0;
2462    switch (type) {
2463      case CAMERA3_TEMPLATE_PREVIEW:
2464        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2465        break;
2466      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2467        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2468        break;
2469      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2470        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2471        break;
2472      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2473        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2474        break;
2475      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2476        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2477        break;
2478      default:
2479        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2480        break;
2481    }
2482    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2483
2484    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2485            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2486
2487    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2488    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2489
2490    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2491    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2492
2493    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2494    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2495
2496    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2497    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2498
2499    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2500    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2501
2502    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2503    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2504
2505    static uint8_t focusMode;
2506    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2507        ALOGE("%s: Setting focus mode to auto", __func__);
2508        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2509    } else {
2510        ALOGE("%s: Setting focus mode to off", __func__);
2511        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2512    }
2513    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2514
2515    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2516    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2517
2518    /*flash*/
2519    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2520    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2521
2522    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2523    settings.update(ANDROID_FLASH_FIRING_POWER,
2524            &flashFiringLevel, 1);
2525
2526    /* lens */
2527    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2528    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2529
2530    if (gCamCapability[mCameraId]->filter_densities_count) {
2531        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2532        settings.update(ANDROID_LENS_FILTER_DENSITY,
2533                        &default_filter_density, 1);
2534    }
2535
2536    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2537    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2538
2539    mDefaultMetadata[type] = settings.release();
2540
2541    pthread_mutex_unlock(&mMutex);
2542    return mDefaultMetadata[type];
2543}
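
/* Note: the default settings for each template are built once and cached in
 * mDefaultMetadata[], so later calls for the same template return the same
 * camera_metadata_t; per the camera3 contract the caller must not free it. */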
2544
2545/*===========================================================================
2546 * FUNCTION   : setFrameParameters
2547 *
2548 * DESCRIPTION: set parameters per frame as requested in the metadata from
2549 *              framework
2550 *
2551 * PARAMETERS :
2552 *   @frame_id  : frame number for this particular request
2553 *   @settings  : frame settings information from framework
2554 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2555 *
2556 * RETURN     : success: NO_ERROR
2557 *              failure:
2558 *==========================================================================*/
2559int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2560                    const camera_metadata_t *settings, uint32_t streamTypeMask)
2561{
2562    /*translate from camera_metadata_t type to parm_type_t*/
2563    int rc = 0;
2564    if (settings == NULL && mFirstRequest) {
2565        /*settings cannot be null for the first request*/
2566        return BAD_VALUE;
2567    }
2568
2569    int32_t hal_version = CAM_HAL_V3;
2570
2571    memset(mParameters, 0, sizeof(parm_buffer_t));
2572    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2573    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2574                sizeof(hal_version), &hal_version);
2575
2576    /*we need to update the frame number in the parameters*/
2577    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2578                                sizeof(frame_id), &frame_id);
2579    if (rc < 0) {
2580        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2581        return BAD_VALUE;
2582    }
2583
2584    /* Update stream id mask where buffers are requested */
2585    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2586                                sizeof(streamTypeMask), &streamTypeMask);
2587    if (rc < 0) {
2588        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2589        return BAD_VALUE;
2590    }
2591
2592    if(settings != NULL){
2593        rc = translateMetadataToParameters(settings);
2594    }
2595    /*set the parameters to backend*/
2596    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2597    return rc;
2598}
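
/* Illustrative sketch (comment only): this is intended to run once per
 * capture request; the batch always carries the HAL version, frame number and
 * stream type mask, and the framework settings (when non-NULL) are translated
 * on top before the single set_parms() push, e.g.:
 *
 *   rc = setFrameParameters(request->frame_number, request->settings,
 *                           streamTypeMask);   // hypothetical call site
 */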
2599
2600/*===========================================================================
2601 * FUNCTION   : translateMetadataToParameters
2602 *
2603 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2604 *
2605 *
2606 * PARAMETERS :
2607 *   @settings  : frame settings information from framework
2608 *
2609 *
2610 * RETURN     : success: NO_ERROR
2611 *              failure:
2612 *==========================================================================*/
2613int QCamera3HardwareInterface::translateMetadataToParameters
2614                                  (const camera_metadata_t *settings)
2615{
2616    int rc = 0;
2617    CameraMetadata frame_settings;
2618    frame_settings = settings;
2619
2620
2621    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2622        int32_t antibandingMode =
2623            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2624        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2625                sizeof(antibandingMode), &antibandingMode);
2626    }
2627
2628    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2629        int32_t expCompensation = frame_settings.find(
2630            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2631        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2632            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2633        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2634            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2635        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2636          sizeof(expCompensation), &expCompensation);
2637    }
2638
2639    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2640        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2641        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2642                sizeof(aeLock), &aeLock);
2643    }
2644    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2645        cam_fps_range_t fps_range;
2646        fps_range.min_fps =
2647            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2648        fps_range.max_fps =
2649            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2650        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2651                sizeof(fps_range), &fps_range);
2652    }
2653
2654    float focalDistance = -1.0;
2655    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2656        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2657        rc = AddSetParmEntryToBatch(mParameters,
2658                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2659                sizeof(focalDistance), &focalDistance);
2660    }
2661
2662    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2663        uint8_t fwk_focusMode =
2664            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2665        uint8_t focusMode;
2666        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2667            focusMode = CAM_FOCUS_MODE_INFINITY;
2668        } else {
2669            focusMode = lookupHalName(FOCUS_MODES_MAP,
2670                                      sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2671                                      fwk_focusMode);
2672        }
2673        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2674                sizeof(focusMode), &focusMode);
2675    }
2676
2677    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2678        uint8_t awbLock =
2679            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2680        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2681                sizeof(awbLock), &awbLock);
2682    }
2683
2684    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2685        uint8_t fwk_whiteLevel =
2686            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2687        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2688                sizeof(WHITE_BALANCE_MODES_MAP),
2689                fwk_whiteLevel);
2690        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2691                sizeof(whiteLevel), &whiteLevel);
2692    }
2693
2694    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2695        uint8_t fwk_effectMode =
2696            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2697        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2698                sizeof(EFFECT_MODES_MAP),
2699                fwk_effectMode);
2700        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2701                sizeof(effectMode), &effectMode);
2702    }
2703
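    /*
     * A single android.control.aeMode value fans out into three HAL
     * parameters: the AEC on/off mode, the LED/flash mode (via
     * AE_FLASH_MODE_MAP), and the red-eye reduction flag.
     */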
2704    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2705        uint8_t fwk_aeMode =
2706            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2707        uint8_t aeMode;
2708        int32_t redeye;
2709
2710        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2711            aeMode = CAM_AE_MODE_OFF;
2712        } else {
2713            aeMode = CAM_AE_MODE_ON;
2714        }
2715        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2716            redeye = 1;
2717        } else {
2718            redeye = 0;
2719        }
2720
2721        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2722                                          sizeof(AE_FLASH_MODE_MAP),
2723                                          fwk_aeMode);
2724        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2725                sizeof(aeMode), &aeMode);
2726        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2727                sizeof(flashMode), &flashMode);
2728        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2729                sizeof(redeye), &redeye);
2730    }
2731
2732    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2733        uint8_t colorCorrectMode =
2734            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2735        rc =
2736            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2737                    sizeof(colorCorrectMode), &colorCorrectMode);
2738    }
2739
2740    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
2741        cam_color_correct_gains_t colorCorrectGains;
2742        for (int i = 0; i < 4; i++) {
2743            colorCorrectGains.gains[i] =
2744                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
2745        }
2746        rc =
2747            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
2748                    sizeof(colorCorrectGains), &colorCorrectGains);
2749    }
2750
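    /*
     * android.colorCorrection.transform is delivered as nine rationals in
     * row-major order; unpack them into the backend's 3x3 matrix.
     */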
2751    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
2752        cam_color_correct_matrix_t colorCorrectTransform;
2753        cam_rational_type_t transform_elem;
2754        int num = 0;
2755        for (int i = 0; i < 3; i++) {
2756           for (int j = 0; j < 3; j++) {
2757              transform_elem.numerator =
2758                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
2759              transform_elem.denominator =
2760                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
2761              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
2762              num++;
2763           }
2764        }
2765        rc =
2766            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
2767                    sizeof(colorCorrectTransform), &colorCorrectTransform);
2768    }
2769
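    /*
     * Unlike the AF trigger below, the AEC precapture trigger is always sent
     * to the backend; it defaults to IDLE with an invalid (-1) trigger id
     * when the request does not carry both the trigger and its id.
     */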
2770    cam_trigger_t aecTrigger;
2771    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2772    aecTrigger.trigger_id = -1;
2773    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2774        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2775        aecTrigger.trigger =
2776            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2777        aecTrigger.trigger_id =
2778            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2779    }
2780    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2781                                sizeof(aecTrigger), &aecTrigger);
2782
2783    /*af_trigger must come with a trigger id*/
2784    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2785        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2786        cam_trigger_t af_trigger;
2787        af_trigger.trigger =
2788            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2789        af_trigger.trigger_id =
2790            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2791        rc = AddSetParmEntryToBatch(mParameters,
2792                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2793    }
2794
2795    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2796        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2797        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2798                sizeof(metaMode), &metaMode);
2799        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2800           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2801           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2802                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2803                                             fwk_sceneMode);
2804           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2805                sizeof(sceneMode), &sceneMode);
2806        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
2807           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2808           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2809                sizeof(sceneMode), &sceneMode);
2810        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
2811           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2812           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2813                sizeof(sceneMode), &sceneMode);
2814        }
2815    }
2816
2817    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2818        int32_t demosaic =
2819            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2820        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2821                sizeof(demosaic), &demosaic);
2822    }
2823
2824    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2825        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2826        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
2827                sizeof(edgeMode), &edgeMode);
2828    }
2829
2830    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2831        int32_t edgeStrength =
2832            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2833        rc = AddSetParmEntryToBatch(mParameters,
2834                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2835    }
2836
2837    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2838        int32_t respectFlashMode = 1;
2839        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2840            uint8_t fwk_aeMode =
2841                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2842            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
2843                respectFlashMode = 0;
2844                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
2845                    __func__);
2846            }
2847        }
2848        if (respectFlashMode) {
2849            uint8_t flashMode =
2850                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2851            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
2852                                          sizeof(FLASH_MODES_MAP),
2853                                          flashMode);
2854            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
2855            // To check: CAM_INTF_META_FLASH_MODE usage
2856            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2857                          sizeof(flashMode), &flashMode);
2858        }
2859    }
2860
2861    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2862        uint8_t flashPower =
2863            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2864        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2865                sizeof(flashPower), &flashPower);
2866    }
2867
2868    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2869        int64_t flashFiringTime =
2870            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2871        rc = AddSetParmEntryToBatch(mParameters,
2872                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2873    }
2874
2875    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2876        uint8_t geometricMode =
2877            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2878        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2879                sizeof(geometricMode), &geometricMode);
2880    }
2881
2882    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2883        uint8_t geometricStrength =
2884            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2885        rc = AddSetParmEntryToBatch(mParameters,
2886                CAM_INTF_META_GEOMETRIC_STRENGTH,
2887                sizeof(geometricStrength), &geometricStrength);
2888    }
2889
2890    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2891        uint8_t hotPixelMode =
2892            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2893        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2894                sizeof(hotPixelMode), &hotPixelMode);
2895    }
2896
2897    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2898        float lensAperture =
2899            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2900        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2901                sizeof(lensAperture), &lensAperture);
2902    }
2903
2904    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2905        float filterDensity =
2906            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2907        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2908                sizeof(filterDensity), &filterDensity);
2909    }
2910
2911    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2912        float focalLength =
2913            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2914        rc = AddSetParmEntryToBatch(mParameters,
2915                CAM_INTF_META_LENS_FOCAL_LENGTH,
2916                sizeof(focalLength), &focalLength);
2917    }
2918
2919    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2920        uint8_t optStabMode =
2921            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2922        rc = AddSetParmEntryToBatch(mParameters,
2923                CAM_INTF_META_LENS_OPT_STAB_MODE,
2924                sizeof(optStabMode), &optStabMode);
2925    }
2926
2927    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2928        uint8_t noiseRedMode =
2929            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2930        rc = AddSetParmEntryToBatch(mParameters,
2931                CAM_INTF_META_NOISE_REDUCTION_MODE,
2932                sizeof(noiseRedMode), &noiseRedMode);
2933    }
2934
2935    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2936        uint8_t noiseRedStrength =
2937            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2938        rc = AddSetParmEntryToBatch(mParameters,
2939                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2940                sizeof(noiseRedStrength), &noiseRedStrength);
2941    }
2942
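    /*
     * Remember the scaler crop region (if any) so that the AE/AF/AWB region
     * handling further down can be checked against it.
     */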
2943    cam_crop_region_t scalerCropRegion;
2944    bool scalerCropSet = false;
2945    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2946        scalerCropRegion.left =
2947            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2948        scalerCropRegion.top =
2949            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2950        scalerCropRegion.width =
2951            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2952        scalerCropRegion.height =
2953            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2954        rc = AddSetParmEntryToBatch(mParameters,
2955                CAM_INTF_META_SCALER_CROP_REGION,
2956                sizeof(scalerCropRegion), &scalerCropRegion);
2957        scalerCropSet = true;
2958    }
2959
2960    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2961        int64_t sensorExpTime =
2962            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2963        rc = AddSetParmEntryToBatch(mParameters,
2964                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2965                sizeof(sensorExpTime), &sensorExpTime);
2966    }
2967
2968    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2969        int64_t sensorFrameDuration =
2970            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2971        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
2972            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
2973        rc = AddSetParmEntryToBatch(mParameters,
2974                CAM_INTF_META_SENSOR_FRAME_DURATION,
2975                sizeof(sensorFrameDuration), &sensorFrameDuration);
2976    }
2977
2978    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2979        int32_t sensorSensitivity =
2980            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2981        if (sensorSensitivity <
2982                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
2983            sensorSensitivity =
2984                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
2985        if (sensorSensitivity >
2986                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
2987            sensorSensitivity =
2988                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
2989        rc = AddSetParmEntryToBatch(mParameters,
2990                CAM_INTF_META_SENSOR_SENSITIVITY,
2991                sizeof(sensorSensitivity), &sensorSensitivity);
2992    }
2993
2994    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
2995        int32_t shadingMode =
2996            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
2997        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
2998                sizeof(shadingMode), &shadingMode);
2999    }
3000
3001    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3002        uint8_t shadingStrength =
3003            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3004        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3005                sizeof(shadingStrength), &shadingStrength);
3006    }
3007
3008    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3009        uint8_t facedetectMode =
3010            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3011        rc = AddSetParmEntryToBatch(mParameters,
3012                CAM_INTF_META_STATS_FACEDETECT_MODE,
3013                sizeof(facedetectMode), &facedetectMode);
3014    }
3015
3016    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3017        uint8_t histogramMode =
3018            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3019        rc = AddSetParmEntryToBatch(mParameters,
3020                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3021                sizeof(histogramMode), &histogramMode);
3022    }
3023
3024    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3025        uint8_t sharpnessMapMode =
3026            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3027        rc = AddSetParmEntryToBatch(mParameters,
3028                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3029                sizeof(sharpnessMapMode), &sharpnessMapMode);
3030    }
3031
3032    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3033        uint8_t tonemapMode =
3034            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3035        rc = AddSetParmEntryToBatch(mParameters,
3036                CAM_INTF_META_TONEMAP_MODE,
3037                sizeof(tonemapMode), &tonemapMode);
3038    }
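    /*
     * The framework delivers each tonemap curve as a flat float array of
     * (Pin, Pout) pairs; repack it into the backend's [point][2] layout,
     * using the sensor capability's max_tone_map_curve_points as the count.
     */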
3039    int point = 0;
3040    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) {
3041        cam_tonemap_curve_t tonemapCurveBlue;
3042        tonemapCurveBlue.tonemap_points_cnt =
3043           gCamCapability[mCameraId]->max_tone_map_curve_points;
3044        for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) {
3045            for (int j = 0; j < 2; j++) {
3046               tonemapCurveBlue.tonemap_points[i][j] =
3047                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3048               point++;
3049            }
3050        }
3051        rc = AddSetParmEntryToBatch(mParameters,
3052                CAM_INTF_META_TONEMAP_CURVE_BLUE,
3053                sizeof(tonemapCurveBlue), &tonemapCurveBlue);
3054    }
3055    point = 0;
3056    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) {
3057        cam_tonemap_curve_t tonemapCurveGreen;
3058        tonemapCurveGreen.tonemap_points_cnt =
3059           gCamCapability[mCameraId]->max_tone_map_curve_points;
3060        for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) {
3061            for (int j = 0; j < 2; j++) {
3062               tonemapCurveGreen.tonemap_points[i][j] =
3063                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3064               point++;
3065            }
3066        }
3067        rc = AddSetParmEntryToBatch(mParameters,
3068                CAM_INTF_META_TONEMAP_CURVE_GREEN,
3069                sizeof(tonemapCurveGreen), &tonemapCurveGreen);
3070    }
3071    point = 0;
3072    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3073        cam_tonemap_curve_t tonemapCurveRed;
3074        tonemapCurveRed.tonemap_points_cnt =
3075           gCamCapability[mCameraId]->max_tone_map_curve_points;
3076        for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) {
3077            for (int j = 0; j < 2; j++) {
3078               tonemapCurveRed.tonemap_points[i][j] =
3079                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3080               point++;
3081            }
3082        }
3083        rc = AddSetParmEntryToBatch(mParameters,
3084                CAM_INTF_META_TONEMAP_CURVE_RED,
3085                sizeof(tonemapCurveRed), &tonemapCurveRed);
3086    }
3087
3088    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3089        uint8_t captureIntent =
3090            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3091        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3092                sizeof(captureIntent), &captureIntent);
3093    }
3094
3095    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3096        uint8_t blackLevelLock =
3097            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3098        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3099                sizeof(blackLevelLock), &blackLevelLock);
3100    }
3101
3102    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3103        uint8_t lensShadingMapMode =
3104            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3105        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3106                sizeof(lensShadingMapMode), &lensShadingMapMode);
3107    }
3108
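    /*
     * Metering / focus ROIs: convert the framework region tuples into
     * cam_area_t and, when a scaler crop region was supplied in the same
     * request, let resetIfNeededROI() adjust the ROI against that crop.
     * The ROI is only forwarded to the backend when it is still valid.
     */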
3109    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3110        cam_area_t roi;
3111        bool reset = true;
3112        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
3113        if (scalerCropSet) {
3114            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3115        }
3116        if (reset) {
3117            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3118                    sizeof(roi), &roi);
3119        }
3120    }
3121
3122    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3123        cam_area_t roi;
3124        bool reset = true;
3125        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
3126        if (scalerCropSet) {
3127            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3128        }
3129        if (reset) {
3130            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3131                    sizeof(roi), &roi);
3132        }
3133    }
3134
3135    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3136        cam_area_t roi;
3137        bool reset = true;
3138        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
3139        if (scalerCropSet) {
3140            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3141        }
3142        if (reset) {
3143            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3144                    sizeof(roi), &roi);
3145        }
3146    }
3147    return rc;
3148}
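
/*
 * Typical call flow (illustrative sketch only, based on the caller earlier in
 * this file): the per-request path first translates the framework metadata
 * into the shared parameter batch and then pushes the batch to the backend:
 *
 *     rc = translateMetadataToParameters(settings);
 *     if (rc == NO_ERROR) {
 *         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
 *                                       mParameters);
 *     }
 *
 * The NO_ERROR check around set_parms is an assumption for the sketch; the
 * actual caller sets the parameters unconditionally.
 */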
3149
3150/*===========================================================================
3151 * FUNCTION   : getJpegSettings
3152 *
3153 * DESCRIPTION: save the jpeg settings in the HAL
3154 *
3155 *
3156 * PARAMETERS :
3157 *   @settings  : frame settings information from framework
3158 *
3159 *
3160 * RETURN     : success: NO_ERROR
3161 *              failure: none (current implementation always returns NO_ERROR)
3162 *==========================================================================*/
3163int QCamera3HardwareInterface::getJpegSettings
3164                                  (const camera_metadata_t *settings)
3165{
3166    if (mJpegSettings) {
3167        if (mJpegSettings->gps_timestamp) {
3168            free(mJpegSettings->gps_timestamp);
3169            mJpegSettings->gps_timestamp = NULL;
3170        }
3171        if (mJpegSettings->gps_coordinates) {
3172            for (int i = 0; i < 3; i++) {
3173                free(mJpegSettings->gps_coordinates[i]);
3174                mJpegSettings->gps_coordinates[i] = NULL;
3175            }
3176        }
3177        free(mJpegSettings);
3178        mJpegSettings = NULL;
3179    }
3180    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3181    CameraMetadata jpeg_settings;
3182    jpeg_settings = settings;
3183
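    /*
     * Populate mJpegSettings from the request, falling back to defaults when
     * a tag is absent (orientation 0, quality 85, 0x0 thumbnail, no GPS).
     */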
3184    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3185        mJpegSettings->jpeg_orientation =
3186            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3187    } else {
3188        mJpegSettings->jpeg_orientation = 0;
3189    }
3190    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3191        mJpegSettings->jpeg_quality =
3192            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3193    } else {
3194        mJpegSettings->jpeg_quality = 85;
3195    }
3196    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3197        mJpegSettings->thumbnail_size.width =
3198            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3199        mJpegSettings->thumbnail_size.height =
3200            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3201    } else {
3202        mJpegSettings->thumbnail_size.width = 0;
3203        mJpegSettings->thumbnail_size.height = 0;
3204    }
3205    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3206        for (int i = 0; i < 3; i++) {
3207            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double));
3208            *(mJpegSettings->gps_coordinates[i]) =
3209                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3210        }
3211    } else {
3212       for (int i = 0; i < 3; i++) {
3213            mJpegSettings->gps_coordinates[i] = NULL;
3214        }
3215    }
3216
3217    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3218        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t));
3219        *(mJpegSettings->gps_timestamp) =
3220            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3221    } else {
3222        mJpegSettings->gps_timestamp = NULL;
3223    }
3224
3225    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3226        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3227        for (int i = 0; i < len; i++) {
3228            mJpegSettings->gps_processing_method[i] =
3229                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3230        }
3231        if (len == 0 || mJpegSettings->gps_processing_method[len-1] != '\0') {
3232            mJpegSettings->gps_processing_method[len] = '\0';
3233        }
3234    } else {
3235        mJpegSettings->gps_processing_method[0] = '\0';
3236    }
3237
3238    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3239        mJpegSettings->sensor_sensitivity =
3240            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3241    } else {
3242        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3243    }
3244
3245    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3246        mJpegSettings->lens_focal_length =
3247            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3248    }
3249    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3250        mJpegSettings->exposure_compensation =
3251            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3252    }
3253    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3254    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3255    mJpegSettings->is_jpeg_format = true;
3256    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3257    return 0;
3258}
3259
3260/*===========================================================================
3261 * FUNCTION   : captureResultCb
3262 *
3263 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3264 *
3265 * PARAMETERS :
3266 *   @frame  : frame information from mm-camera-interface
3267 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3268 *   @userdata: userdata
3269 *
3270 * RETURN     : NONE
3271 *==========================================================================*/
3272void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3273                camera3_stream_buffer_t *buffer,
3274                uint32_t frame_number, void *userdata)
3275{
3276    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3277    if (hw == NULL) {
3278        ALOGE("%s: Invalid hw %p", __func__, hw);
3279        return;
3280    }
3281
3282    hw->captureResultCb(metadata, buffer, frame_number);
3283    return;
3284}
3285
3286
3287/*===========================================================================
3288 * FUNCTION   : initialize
3289 *
3290 * DESCRIPTION: Pass framework callback pointers to HAL
3291 *
3292 * PARAMETERS :
3293 *   @device       : camera3 device structure
3294 *   @callback_ops : callback function pointers from the framework
3295 * RETURN     : Success : 0
3296 *              Failure: -ENODEV
3297 *==========================================================================*/
3298
3299int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3300                                  const camera3_callback_ops_t *callback_ops)
3301{
3302    ALOGV("%s: E", __func__);
3303    QCamera3HardwareInterface *hw =
3304        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3305    if (!hw) {
3306        ALOGE("%s: NULL camera device", __func__);
3307        return -ENODEV;
3308    }
3309
3310    int rc = hw->initialize(callback_ops);
3311    ALOGV("%s: X", __func__);
3312    return rc;
3313}
3314
3315/*===========================================================================
3316 * FUNCTION   : configure_streams
3317 *
3318 * DESCRIPTION: configure the input and output streams requested by the framework
3319 *
3320 * PARAMETERS :
3321 *   @device      : camera3 device structure
3322 *   @stream_list : stream configuration from the framework
3323 * RETURN     : Success: 0
3324 *              Failure: -EINVAL (if stream configuration is invalid)
3325 *                       -ENODEV (fatal error)
3326 *==========================================================================*/
3327
3328int QCamera3HardwareInterface::configure_streams(
3329        const struct camera3_device *device,
3330        camera3_stream_configuration_t *stream_list)
3331{
3332    ALOGV("%s: E", __func__);
3333    QCamera3HardwareInterface *hw =
3334        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3335    if (!hw) {
3336        ALOGE("%s: NULL camera device", __func__);
3337        return -ENODEV;
3338    }
3339    int rc = hw->configureStreams(stream_list);
3340    ALOGV("%s: X", __func__);
3341    return rc;
3342}
3343
3344/*===========================================================================
3345 * FUNCTION   : register_stream_buffers
3346 *
3347 * DESCRIPTION: Register stream buffers with the device
3348 *
3349 * PARAMETERS :
3350 *   @device: camera3 device; @buffer_set: stream buffers to be registered
3351 * RETURN     : Success: 0; Failure: -ENODEV (NULL device) or error code
3352 *==========================================================================*/
3353int QCamera3HardwareInterface::register_stream_buffers(
3354        const struct camera3_device *device,
3355        const camera3_stream_buffer_set_t *buffer_set)
3356{
3357    ALOGV("%s: E", __func__);
3358    QCamera3HardwareInterface *hw =
3359        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3360    if (!hw) {
3361        ALOGE("%s: NULL camera device", __func__);
3362        return -ENODEV;
3363    }
3364    int rc = hw->registerStreamBuffers(buffer_set);
3365    ALOGV("%s: X", __func__);
3366    return rc;
3367}
3368
3369/*===========================================================================
3370 * FUNCTION   : construct_default_request_settings
3371 *
3372 * DESCRIPTION: Configure a settings buffer to meet the required use case
3373 *
3374 * PARAMETERS :
3375 *   @device : camera3 device structure
3376 *   @type   : capture template type requested by the framework
3377 * RETURN     : Success: Return valid metadata
3378 *              Failure: Return NULL
3379 *==========================================================================*/
3380const camera_metadata_t* QCamera3HardwareInterface::
3381    construct_default_request_settings(const struct camera3_device *device,
3382                                        int type)
3383{
3384
3385    ALOGV("%s: E", __func__);
3386    camera_metadata_t* fwk_metadata = NULL;
3387    QCamera3HardwareInterface *hw =
3388        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3389    if (!hw) {
3390        ALOGE("%s: NULL camera device", __func__);
3391        return NULL;
3392    }
3393
3394    fwk_metadata = hw->translateCapabilityToMetadata(type);
3395
3396    ALOGV("%s: X", __func__);
3397    return fwk_metadata;
3398}
3399
3400/*===========================================================================
3401 * FUNCTION   : process_capture_request
3402 *
3403 * DESCRIPTION: process a capture request submitted by the framework
3404 *
3405 * PARAMETERS :
3406 *   @device  : camera3 device structure
3407 *   @request : capture request to be processed
3408 * RETURN     : Success: 0; Failure: -EINVAL (NULL device) or error code
3409 *==========================================================================*/
3410int QCamera3HardwareInterface::process_capture_request(
3411                    const struct camera3_device *device,
3412                    camera3_capture_request_t *request)
3413{
3414    ALOGV("%s: E", __func__);
3415    QCamera3HardwareInterface *hw =
3416        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3417    if (!hw) {
3418        ALOGE("%s: NULL camera device", __func__);
3419        return -EINVAL;
3420    }
3421
3422    int rc = hw->processCaptureRequest(request);
3423    ALOGV("%s: X", __func__);
3424    return rc;
3425}
3426
3427/*===========================================================================
3428 * FUNCTION   : get_metadata_vendor_tag_ops
3429 *
3430 * DESCRIPTION: retrieve the vendor tag query operations
3431 *
3432 * PARAMETERS :
3433 *   @device : camera3 device structure
3434 *   @ops    : vendor tag query ops to be filled in
3435 * RETURN     : NONE
3436 *==========================================================================*/
3437
3438void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3439                const struct camera3_device *device,
3440                vendor_tag_query_ops_t* ops)
3441{
3442    ALOGV("%s: E", __func__);
3443    QCamera3HardwareInterface *hw =
3444        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3445    if (!hw) {
3446        ALOGE("%s: NULL camera device", __func__);
3447        return;
3448    }
3449
3450    hw->getMetadataVendorTagOps(ops);
3451    ALOGV("%s: X", __func__);
3452    return;
3453}
3454
3455/*===========================================================================
3456 * FUNCTION   : dump
3457 *
3458 * DESCRIPTION: dump HAL state to the given file descriptor
3459 *
3460 * PARAMETERS :
3461 *   @device : camera3 device structure
3462 *   @fd     : file descriptor to dump into
3463 * RETURN     : NONE
3464 *==========================================================================*/
3465
3466void QCamera3HardwareInterface::dump(
3467                const struct camera3_device *device, int fd)
3468{
3469    ALOGV("%s: E", __func__);
3470    QCamera3HardwareInterface *hw =
3471        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3472    if (!hw) {
3473        ALOGE("%s: NULL camera device", __func__);
3474        return;
3475    }
3476
3477    hw->dump(fd);
3478    ALOGV("%s: X", __func__);
3479    return;
3480}
3481
3482/*===========================================================================
3483 * FUNCTION   : close_camera_device
3484 *
3485 * DESCRIPTION: close the camera device and mark the camera session inactive
3486 *
3487 * PARAMETERS :
3488 *   @device : hw_device_t structure of the camera to be closed
3489 *
3490 * RETURN     : Success: NO_ERROR; Failure: BAD_VALUE (NULL device)
3491 *==========================================================================*/
3492int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3493{
3494    ALOGV("%s: E", __func__);
3495    int ret = NO_ERROR;
3496    QCamera3HardwareInterface *hw =
3497        reinterpret_cast<QCamera3HardwareInterface *>(
3498            reinterpret_cast<camera3_device_t *>(device)->priv);
3499    if (!hw) {
3500        ALOGE("NULL camera device");
3501        return BAD_VALUE;
3502    }
3503    delete hw;
3504
3505    pthread_mutex_lock(&mCameraSessionLock);
3506    mCameraSessionActive = 0;
3507    pthread_mutex_unlock(&mCameraSessionLock);
3508    ALOGV("%s: X", __func__);
3509    return ret;
3510}
3511
3512/*===========================================================================
3513 * FUNCTION   : getWaveletDenoiseProcessPlate
3514 *
3515 * DESCRIPTION: query wavelet denoise process plate
3516 *
3517 * PARAMETERS : None
3518 *
3519 * RETURN     : WNR process plate value
3520 *==========================================================================*/
3521cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3522{
3523    char prop[PROPERTY_VALUE_MAX];
3524    memset(prop, 0, sizeof(prop));
3525    property_get("persist.denoise.process.plates", prop, "0");
3526    int processPlate = atoi(prop);
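    /*
     * The plate selection can be overridden at runtime for debugging, e.g.
     * (illustrative):
     *     adb shell setprop persist.denoise.process.plates 2
     * Values 0-3 map to the cases below; anything else falls back to the
     * streamlined YCbCr plate.
     */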
3527    switch(processPlate) {
3528    case 0:
3529        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3530    case 1:
3531        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3532    case 2:
3533        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3534    case 3:
3535        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3536    default:
3537        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3538    }
3539}
3540
3541/*===========================================================================
3542 * FUNCTION   : needRotationReprocess
3543 *
3544 * DESCRIPTION: check whether rotation needs to be done by reprocess in pp
3545 *
3546 * PARAMETERS : none
3547 *
3548 * RETURN     : true: needed
3549 *              false: no need
3550 *==========================================================================*/
3551bool QCamera3HardwareInterface::needRotationReprocess()
3552{
3553
3554    if (!mJpegSettings->is_jpeg_format) {
3555        // RAW image, no need to reprocess
3556        return false;
3557    }
3558
3559    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3560        mJpegSettings->jpeg_orientation > 0) {
3561        // current rotation is not zero, and pp has the capability to process rotation
3562        ALOGD("%s: need do reprocess for rotation", __func__);
3563        return true;
3564    }
3565
3566    return false;
3567}
3568
3569/*===========================================================================
3570 * FUNCTION   : needReprocess
3571 *
3572 * DESCRIPTION: check whether reprocess is needed
3573 *
3574 * PARAMETERS : none
3575 *
3576 * RETURN     : true: needed
3577 *              false: no need
3578 *==========================================================================*/
3579bool QCamera3HardwareInterface::needReprocess()
3580{
3581    if (!mJpegSettings->is_jpeg_format) {
3582        // RAW image, no need to reprocess
3583        return false;
3584    }
3585
3586    if ((mJpegSettings->min_required_pp_mask > 0) ||
3587         isWNREnabled()) {
3588        // TODO: add for ZSL HDR later
3589        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3590        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3591        return true;
3592    }
3593    return needRotationReprocess();
3594}
3595
3596/*===========================================================================
3597 * FUNCTION   : addOnlineReprocChannel
3598 *
3599 * DESCRIPTION: add an online reprocess channel that will reprocess frames
3600 *              coming from the input channel
3601 *
3602 * PARAMETERS :
3603 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3604 *
3605 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3606 *==========================================================================*/
3607QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3608                                                      QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3609{
3610    int32_t rc = NO_ERROR;
3611    QCamera3ReprocessChannel *pChannel = NULL;
3612    if (pInputChannel == NULL) {
3613        ALOGE("%s: input channel obj is NULL", __func__);
3614        return NULL;
3615    }
3616
3617    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3618            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
3619    if (NULL == pChannel) {
3620        ALOGE("%s: no mem for reprocess channel", __func__);
3621        return NULL;
3622    }
3623
3624    // Capture channel, only need snapshot and postview streams start together
3625    mm_camera_channel_attr_t attr;
3626    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
3627    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
3628    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
3629    rc = pChannel->initialize();
3630    if (rc != NO_ERROR) {
3631        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
3632        delete pChannel;
3633        return NULL;
3634    }
3635
3636    // pp feature config
3637    cam_pp_feature_config_t pp_config;
3638    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
3639    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
3640        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
3641        pp_config.sharpness = 10;
3642    }
3643
3644    if (isWNREnabled()) {
3645        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
3646        pp_config.denoise2d.denoise_enable = 1;
3647        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
3648    }
3649    if (needRotationReprocess()) {
3650        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
3651        int rotation = mJpegSettings->jpeg_orientation;
3652        if (rotation == 0) {
3653            pp_config.rotation = ROTATE_0;
3654        } else if (rotation == 90) {
3655            pp_config.rotation = ROTATE_90;
3656        } else if (rotation == 180) {
3657            pp_config.rotation = ROTATE_180;
3658        } else if (rotation == 270) {
3659            pp_config.rotation = ROTATE_270;
3660        }
3661    }
3662
3663    rc = pChannel->addReprocStreamsFromSource(pp_config,
3664                                              pInputChannel,
3665                                              mMetadataChannel);
3666
3667    if (rc != NO_ERROR) {
3668        delete pChannel;
3669        return NULL;
3670    }
3671    return pChannel;
3672}
3673
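/*===========================================================================
 * FUNCTION   : getMaxUnmatchedFramesInQueue
 *
 * DESCRIPTION: get the maximum number of unmatched frames allowed in the
 *              channel queue, derived from the backend's minimum number of
 *              post-processing buffers
 *
 * PARAMETERS : none
 *
 * RETURN     : max number of unmatched frames
 *==========================================================================*/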
3674int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
3675{
3676    return gCamCapability[mCameraId]->min_num_pp_bufs;
3677}
3678
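/*===========================================================================
 * FUNCTION   : isWNREnabled
 *
 * DESCRIPTION: query whether wavelet noise reduction is supported per the
 *              sensor capability
 *
 * PARAMETERS : none
 *
 * RETURN     : true if WNR is supported, false otherwise
 *==========================================================================*/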
3679bool QCamera3HardwareInterface::isWNREnabled() {
3680    return gCamCapability[mCameraId]->isWnrSupported;
3681}
3682
3683}; //end namespace qcamera
3684