QCamera3HWI.cpp revision 34a6e88cee59495adcf1437d027c105171738a07
1/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
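// Globals shared across camera sessions: per-camera capabilities, the previously
// applied settings buffer, and cached static metadata.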
49cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
50parm_buffer_t *prevSettings;
51const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
52
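// Serializes camera session open/close; only one camera session may be active at a time.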
53pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
54    PTHREAD_MUTEX_INITIALIZER;
55unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
56
57const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
58    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
59    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
60    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
61    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
62    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
63    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
64    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
65    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
66    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
67};
68
69const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
70    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
71    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
72    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
73    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
74    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT },
75    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
76    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
77    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
78    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
79};
80
81const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
82    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
83    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
84    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
85    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
86    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
87    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
88    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
89    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
90    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
91    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
92    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
93    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
94    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
95    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
96    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
97};
98
99const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
100    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
101    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
102    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
103    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
104    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
105    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
106};
107
108const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
109    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
110    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
111    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
112    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
113};
114
115const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
116    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
117    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
118    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
119    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
120    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
121};
122
123const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
124    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
125    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
126    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
127};
128
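// Supported JPEG thumbnail sizes as (width, height) pairs; the trailing (0, 0) entry
// indicates that thumbnail generation may be disabled.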
129const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
130                                             320, 240, 176, 144, 0, 0};
131
132camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
133    initialize:                         QCamera3HardwareInterface::initialize,
134    configure_streams:                  QCamera3HardwareInterface::configure_streams,
135    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
136    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
137    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
138    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
139    dump:                               QCamera3HardwareInterface::dump,
140};
141
142
143/*===========================================================================
144 * FUNCTION   : QCamera3HardwareInterface
145 *
146 * DESCRIPTION: constructor of QCamera3HardwareInterface
147 *
148 * PARAMETERS :
149 *   @cameraId  : camera ID
150 *
151 * RETURN     : none
152 *==========================================================================*/
153QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
154    : mCameraId(cameraId),
155      mCameraHandle(NULL),
156      mCameraOpened(false),
157      mCameraInitialized(false),
158      mCallbackOps(NULL),
159      mInputStream(NULL),
160      mMetadataChannel(NULL),
161      mPictureChannel(NULL),
162      mFirstRequest(false),
163      mParamHeap(NULL),
164      mParameters(NULL),
165      mJpegSettings(NULL),
166      mIsZslMode(false),
167      m_pPowerModule(NULL)
168{
169    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
170    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
171    mCameraDevice.common.close = close_camera_device;
172    mCameraDevice.ops = &mCameraOps;
173    mCameraDevice.priv = this;
174    gCamCapability[cameraId]->version = CAM_HAL_V3;
175    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
176    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
177    gCamCapability[cameraId]->min_num_pp_bufs = 3;
178
179    pthread_cond_init(&mRequestCond, NULL);
180    mPendingRequest = 0;
181    mCurrentRequestId = -1;
182    pthread_mutex_init(&mMutex, NULL);
183
184    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
185        mDefaultMetadata[i] = NULL;
186
187#ifdef HAS_MULTIMEDIA_HINTS
188    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
189        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
190    }
191#endif
192}
193
194/*===========================================================================
195 * FUNCTION   : ~QCamera3HardwareInterface
196 *
197 * DESCRIPTION: destructor of QCamera3HardwareInterface
198 *
199 * PARAMETERS : none
200 *
201 * RETURN     : none
202 *==========================================================================*/
203QCamera3HardwareInterface::~QCamera3HardwareInterface()
204{
205    ALOGV("%s: E", __func__);
206    /* We need to stop all streams before deleting any stream */
207    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
208        it != mStreamInfo.end(); it++) {
209        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
210        if (channel)
211           channel->stop();
212    }
213    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
214        it != mStreamInfo.end(); it++) {
215        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
216        if (channel)
217            delete channel;
218        free (*it);
219    }
220
221    mPictureChannel = NULL;
222
223    if (mJpegSettings != NULL) {
224        free(mJpegSettings);
225        mJpegSettings = NULL;
226    }
227
228    /* Clean up all channels */
229    if (mCameraInitialized) {
230        mMetadataChannel->stop();
231        delete mMetadataChannel;
232        mMetadataChannel = NULL;
233        deinitParameters();
234    }
235
236    if (mCameraOpened)
237        closeCamera();
238
239    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
240        if (mDefaultMetadata[i])
241            free_camera_metadata(mDefaultMetadata[i]);
242
243    pthread_cond_destroy(&mRequestCond);
244
245    pthread_mutex_destroy(&mMutex);
246    ALOGV("%s: X", __func__);
247}
248
249/*===========================================================================
250 * FUNCTION   : openCamera
251 *
252 * DESCRIPTION: open camera
253 *
254 * PARAMETERS :
255 *   @hw_device  : double ptr for camera device struct
256 *
257 * RETURN     : int32_t type of status
258 *              NO_ERROR  -- success
259 *              non-zero failure code
260 *==========================================================================*/
261int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
262{
263    int rc = 0;
264    pthread_mutex_lock(&mCameraSessionLock);
265    if (mCameraSessionActive) {
266        ALOGE("%s: multiple simultaneous camera instances not supported", __func__);
267        pthread_mutex_unlock(&mCameraSessionLock);
268        return INVALID_OPERATION;
269    }
270
271    if (mCameraOpened) {
272        *hw_device = NULL;
        pthread_mutex_unlock(&mCameraSessionLock);
273        return PERMISSION_DENIED;
274    }
275
276    rc = openCamera();
277    if (rc == 0) {
278        *hw_device = &mCameraDevice.common;
279        mCameraSessionActive = 1;
280    } else
281        *hw_device = NULL;
282
283#ifdef HAS_MULTIMEDIA_HINTS
284    if (rc == 0) {
285        if (m_pPowerModule) {
286            if (m_pPowerModule->powerHint) {
287                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
288                        (void *)"state=1");
289            }
290        }
291    }
292#endif
293    pthread_mutex_unlock(&mCameraSessionLock);
294    return rc;
295}
296
297/*===========================================================================
298 * FUNCTION   : openCamera
299 *
300 * DESCRIPTION: open camera
301 *
302 * PARAMETERS : none
303 *
304 * RETURN     : int32_t type of status
305 *              NO_ERROR  -- success
306 *              non-zero failure code
307 *==========================================================================*/
308int QCamera3HardwareInterface::openCamera()
309{
310    if (mCameraHandle) {
311        ALOGE("Failure: Camera already opened");
312        return ALREADY_EXISTS;
313    }
314    mCameraHandle = camera_open(mCameraId);
315    if (!mCameraHandle) {
316        ALOGE("camera_open failed.");
317        return UNKNOWN_ERROR;
318    }
319
320    mCameraOpened = true;
321
322    return NO_ERROR;
323}
324
325/*===========================================================================
326 * FUNCTION   : closeCamera
327 *
328 * DESCRIPTION: close camera
329 *
330 * PARAMETERS : none
331 *
332 * RETURN     : int32_t type of status
333 *              NO_ERROR  -- success
334 *              non-zero failure code
335 *==========================================================================*/
336int QCamera3HardwareInterface::closeCamera()
337{
338    int rc = NO_ERROR;
339
340    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
341    mCameraHandle = NULL;
342    mCameraOpened = false;
343
344#ifdef HAS_MULTIMEDIA_HINTS
345    if (rc == NO_ERROR) {
346        if (m_pPowerModule) {
347            if (m_pPowerModule->powerHint) {
348                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
349                        (void *)"state=0");
350            }
351        }
352    }
353#endif
354
355    return rc;
356}
357
358/*===========================================================================
359 * FUNCTION   : initialize
360 *
361 * DESCRIPTION: Initialize frameworks callback functions
362 *
363 * PARAMETERS :
364 *   @callback_ops : callback function to frameworks
365 *
366 * RETURN     :
367 *
368 *==========================================================================*/
369int QCamera3HardwareInterface::initialize(
370        const struct camera3_callback_ops *callback_ops)
371{
372    int rc;
373
374    pthread_mutex_lock(&mMutex);
375
376    rc = initParameters();
377    if (rc < 0) {
378        ALOGE("%s: initParameters failed %d", __func__, rc);
379        goto err1;
380    }
381    //Create metadata channel and initialize it
382    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
383                    mCameraHandle->ops, captureResultCb,
384                    &gCamCapability[mCameraId]->padding_info, this);
385    if (mMetadataChannel == NULL) {
386        ALOGE("%s: failed to allocate metadata channel", __func__);
387        rc = -ENOMEM;
388        goto err2;
389    }
390    rc = mMetadataChannel->initialize();
391    if (rc < 0) {
392        ALOGE("%s: metadata channel initialization failed", __func__);
393        goto err3;
394    }
395
396    mCallbackOps = callback_ops;
397
398    pthread_mutex_unlock(&mMutex);
399    mCameraInitialized = true;
400    return 0;
401
402err3:
403    delete mMetadataChannel;
404    mMetadataChannel = NULL;
405err2:
406    deinitParameters();
407err1:
408    pthread_mutex_unlock(&mMutex);
409    return rc;
410}
411
412/*===========================================================================
413 * FUNCTION   : configureStreams
414 *
415 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
416 *              and output streams.
417 *
418 * PARAMETERS :
419 *   @stream_list : streams to be configured
420 *
421 * RETURN     :
422 *
423 *==========================================================================*/
424int QCamera3HardwareInterface::configureStreams(
425        camera3_stream_configuration_t *streamList)
426{
427    int rc = 0;
428    pthread_mutex_lock(&mMutex);
429    // Sanity check stream_list
430    if (streamList == NULL) {
431        ALOGE("%s: NULL stream configuration", __func__);
432        pthread_mutex_unlock(&mMutex);
433        return BAD_VALUE;
434    }
435
436    if (streamList->streams == NULL) {
437        ALOGE("%s: NULL stream list", __func__);
438        pthread_mutex_unlock(&mMutex);
439        return BAD_VALUE;
440    }
441
442    if (streamList->num_streams < 1) {
443        ALOGE("%s: Bad number of streams requested: %d", __func__,
444                streamList->num_streams);
445        pthread_mutex_unlock(&mMutex);
446        return BAD_VALUE;
447    }
448
449    camera3_stream_t *inputStream = NULL;
450    camera3_stream_t *jpegStream = NULL;
451    /* first invalidate all the streams in mStreamInfo;
452     * if they appear again, they will be validated */
453    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
454            it != mStreamInfo.end(); it++) {
455        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
456        channel->stop();
457        (*it)->status = INVALID;
458    }
459
460    for (size_t i = 0; i < streamList->num_streams; i++) {
461        camera3_stream_t *newStream = streamList->streams[i];
462        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
463                __func__, newStream->stream_type, newStream->format,
464                 newStream->width, newStream->height);
465        //if the stream is already in mStreamInfo, validate it
466        bool stream_exists = false;
467        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
468                it != mStreamInfo.end(); it++) {
469            if ((*it)->stream == newStream) {
470                QCamera3Channel *channel =
471                    (QCamera3Channel*)(*it)->stream->priv;
472                stream_exists = true;
473                (*it)->status = RECONFIGURE;
474                /*delete the channel object associated with the stream because
475                  we need to reconfigure*/
476                delete channel;
477                (*it)->stream->priv = NULL;
478            }
479        }
480        if (!stream_exists) {
481            //new stream
482            stream_info_t* stream_info;
483            stream_info = (stream_info_t *)calloc(1, sizeof(stream_info_t));
484            stream_info->stream = newStream;
485            stream_info->status = VALID;
486            stream_info->registered = 0;
487            mStreamInfo.push_back(stream_info);
488        }
489        if (newStream->stream_type == CAMERA3_STREAM_INPUT
490                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
491            if (inputStream != NULL) {
492                ALOGE("%s: Multiple input streams requested!", __func__);
493                pthread_mutex_unlock(&mMutex);
494                return BAD_VALUE;
495            }
496            inputStream = newStream;
497        }
498        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
499            jpegStream = newStream;
500        }
501    }
502    mInputStream = inputStream;
503
504    /*clean up invalid streams*/
505    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
506            it != mStreamInfo.end();) {
507        if(((*it)->status) == INVALID){
508            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
509            delete channel;
510            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
511            free(*it);
512            it = mStreamInfo.erase(it);
513        } else {
514            it++;
515        }
516    }
517
518    //mMetadataChannel->stop();
519
520    /* Allocate channel objects for the requested streams */
521    for (size_t i = 0; i < streamList->num_streams; i++) {
522        camera3_stream_t *newStream = streamList->streams[i];
523        if (newStream->priv == NULL) {
524            //New stream, construct channel
525            switch (newStream->stream_type) {
526            case CAMERA3_STREAM_INPUT:
527                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
528                break;
529            case CAMERA3_STREAM_BIDIRECTIONAL:
530                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
531                    GRALLOC_USAGE_HW_CAMERA_WRITE;
532                break;
533            case CAMERA3_STREAM_OUTPUT:
534                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
535                break;
536            default:
537                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
538                break;
539            }
540
541            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
542                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
543                QCamera3Channel *channel;
544                switch (newStream->format) {
545                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
546                case HAL_PIXEL_FORMAT_YCbCr_420_888:
547                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
548                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
549                        jpegStream) {
550                        uint32_t width = jpegStream->width;
551                        uint32_t height = jpegStream->height;
552                        mIsZslMode = true;
553                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
554                            mCameraHandle->ops, captureResultCb,
555                            &gCamCapability[mCameraId]->padding_info, this, newStream,
556                            width, height);
557                    } else
558                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
559                            mCameraHandle->ops, captureResultCb,
560                            &gCamCapability[mCameraId]->padding_info, this, newStream);
561                    if (channel == NULL) {
562                        ALOGE("%s: allocation of channel failed", __func__);
563                        pthread_mutex_unlock(&mMutex);
564                        return -ENOMEM;
565                    }
566
567                    newStream->priv = channel;
568                    break;
569                case HAL_PIXEL_FORMAT_BLOB:
570                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
571                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
572                            mCameraHandle->ops, captureResultCb,
573                            &gCamCapability[mCameraId]->padding_info, this, newStream);
574                    if (mPictureChannel == NULL) {
575                        ALOGE("%s: allocation of channel failed", __func__);
576                        pthread_mutex_unlock(&mMutex);
577                        return -ENOMEM;
578                    }
579                    newStream->priv = (QCamera3Channel*)mPictureChannel;
580                    break;
581
582                //TODO: Add support for app consumed format?
583                default:
584                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
585                    break;
586                }
587            }
588        } else {
589            // Channel already exists for this stream
590            // Do nothing for now
591        }
592    }
593    /*For the streams to be reconfigured we need to register the buffers
594      since the framework won't */
595    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
596            it != mStreamInfo.end(); it++) {
597        if ((*it)->status == RECONFIGURE) {
598            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
599            /*only register buffers for streams that have already been
600              registered*/
601            if ((*it)->registered) {
602                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
603                        (*it)->buffer_set.buffers);
604                if (rc != NO_ERROR) {
605                    ALOGE("%s: Failed to register the buffers of old stream,"
606                            " rc = %d", __func__, rc);
607                }
608                ALOGV("%s: channel %p has %d buffers",
609                        __func__, channel, (*it)->buffer_set.num_buffers);
610            }
611        }
612
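        // Reset the count of buffers pending with the HAL for this stream.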
613        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
614        if (index == NAME_NOT_FOUND) {
615            mPendingBuffersMap.add((*it)->stream, 0);
616        } else {
617            mPendingBuffersMap.editValueAt(index) = 0;
618        }
619    }
620
621    /* Initialize mPendingRequestsList and mPendingBuffersMap */
622    mPendingRequestsList.clear();
623
624    //settings/parameters don't carry over for new configureStreams
625    memset(mParameters, 0, sizeof(parm_buffer_t));
626    mFirstRequest = true;
627
628    pthread_mutex_unlock(&mMutex);
629    return rc;
630}
631
632/*===========================================================================
633 * FUNCTION   : validateCaptureRequest
634 *
635 * DESCRIPTION: validate a capture request from camera service
636 *
637 * PARAMETERS :
638 *   @request : request from framework to process
639 *
640 * RETURN     :
641 *
642 *==========================================================================*/
643int QCamera3HardwareInterface::validateCaptureRequest(
644                    camera3_capture_request_t *request)
645{
646    ssize_t idx = 0;
647    const camera3_stream_buffer_t *b;
648    CameraMetadata meta;
649
650    /* Sanity check the request */
651    if (request == NULL) {
652        ALOGE("%s: NULL capture request", __func__);
653        return BAD_VALUE;
654    }
655
656    uint32_t frameNumber = request->frame_number;
657    if (request->input_buffer != NULL &&
658            request->input_buffer->stream != mInputStream) {
659        ALOGE("%s: Request %d: Input buffer not from input stream!",
660                __FUNCTION__, frameNumber);
661        return BAD_VALUE;
662    }
663    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
664        ALOGE("%s: Request %d: No output buffers provided!",
665                __FUNCTION__, frameNumber);
666        return BAD_VALUE;
667    }
668    if (request->input_buffer != NULL) {
669        b = request->input_buffer;
670        QCamera3Channel *channel =
671            static_cast<QCamera3Channel*>(b->stream->priv);
672        if (channel == NULL) {
673            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
674                    __func__, frameNumber, idx);
675            return BAD_VALUE;
676        }
677        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
678            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
679                    __func__, frameNumber, idx);
680            return BAD_VALUE;
681        }
682        if (b->release_fence != -1) {
683            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
684                    __func__, frameNumber, idx);
685            return BAD_VALUE;
686        }
687        if (b->buffer == NULL) {
688            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
689                    __func__, frameNumber, idx);
690            return BAD_VALUE;
691        }
692    }
693
694    // Validate all buffers
695    b = request->output_buffers;
696    do {
697        QCamera3Channel *channel =
698                static_cast<QCamera3Channel*>(b->stream->priv);
699        if (channel == NULL) {
700            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
701                    __func__, frameNumber, idx);
702            return BAD_VALUE;
703        }
704        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
705            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
706                    __func__, frameNumber, idx);
707            return BAD_VALUE;
708        }
709        if (b->release_fence != -1) {
710            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
711                    __func__, frameNumber, idx);
712            return BAD_VALUE;
713        }
714        if (b->buffer == NULL) {
715            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
716                    __func__, frameNumber, idx);
717            return BAD_VALUE;
718        }
719        idx++;
720        b = request->output_buffers + idx;
721    } while (idx < (ssize_t)request->num_output_buffers);
722
723    return NO_ERROR;
724}
725
726/*===========================================================================
727 * FUNCTION   : registerStreamBuffers
728 *
729 * DESCRIPTION: Register buffers for a given stream with the HAL device.
730 *
731 * PARAMETERS :
732 *   @buffer_set : set of buffers to be registered for a single stream
733 *
734 * RETURN     :
735 *
736 *==========================================================================*/
737int QCamera3HardwareInterface::registerStreamBuffers(
738        const camera3_stream_buffer_set_t *buffer_set)
739{
740    int rc = 0;
741
742    pthread_mutex_lock(&mMutex);
743
744    if (buffer_set == NULL) {
745        ALOGE("%s: Invalid buffer_set parameter.", __func__);
746        pthread_mutex_unlock(&mMutex);
747        return -EINVAL;
748    }
749    if (buffer_set->stream == NULL) {
750        ALOGE("%s: Invalid stream parameter.", __func__);
751        pthread_mutex_unlock(&mMutex);
752        return -EINVAL;
753    }
754    if (buffer_set->num_buffers < 1) {
755        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
756        pthread_mutex_unlock(&mMutex);
757        return -EINVAL;
758    }
759    if (buffer_set->buffers == NULL) {
760        ALOGE("%s: Invalid buffers parameter.", __func__);
761        pthread_mutex_unlock(&mMutex);
762        return -EINVAL;
763    }
764
765    camera3_stream_t *stream = buffer_set->stream;
766    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
767
768    //set the buffer_set in the mStreamInfo array
769    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
770            it != mStreamInfo.end(); it++) {
771        if ((*it)->stream == stream) {
772            uint32_t numBuffers = buffer_set->num_buffers;
773            (*it)->buffer_set.stream = buffer_set->stream;
774            (*it)->buffer_set.num_buffers = numBuffers;
775            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
776            if ((*it)->buffer_set.buffers == NULL) {
777                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
778                pthread_mutex_unlock(&mMutex);
779                return -ENOMEM;
780            }
781            for (size_t j = 0; j < numBuffers; j++){
782                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
783            }
784            (*it)->registered = 1;
785        }
786    }
787    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
788    if (rc < 0) {
789        ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
790        pthread_mutex_unlock(&mMutex);
791        return -ENODEV;
792    }
793
794    pthread_mutex_unlock(&mMutex);
795    return NO_ERROR;
796}
797
798/*===========================================================================
799 * FUNCTION   : processCaptureRequest
800 *
801 * DESCRIPTION: process a capture request from camera service
802 *
803 * PARAMETERS :
804 *   @request : request from framework to process
805 *
806 * RETURN     :
807 *
808 *==========================================================================*/
809int QCamera3HardwareInterface::processCaptureRequest(
810                    camera3_capture_request_t *request)
811{
812    int rc = NO_ERROR;
813    int32_t request_id;
814    CameraMetadata meta;
815
816    pthread_mutex_lock(&mMutex);
817
818    rc = validateCaptureRequest(request);
819    if (rc != NO_ERROR) {
820        ALOGE("%s: incoming request is not valid", __func__);
821        pthread_mutex_unlock(&mMutex);
822        return rc;
823    }
824
825    uint32_t frameNumber = request->frame_number;
826    uint32_t streamTypeMask = 0;
827
828    meta = request->settings;
829    if (meta.exists(ANDROID_REQUEST_ID)) {
830        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
831        mCurrentRequestId = request_id;
832        ALOGV("%s: Received request with id: %d",__func__, request_id);
833    } else if (mFirstRequest || mCurrentRequestId == -1){
834        ALOGE("%s: Unable to find request id field,"
835                " and no previous id available", __func__);
        pthread_mutex_unlock(&mMutex);
836        return NAME_NOT_FOUND;
837    } else {
838        ALOGV("%s: Re-using old request id", __func__);
839        request_id = mCurrentRequestId;
840    }
841
842    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
843                                    __func__, __LINE__,
844                                    request->num_output_buffers,
845                                    request->input_buffer,
846                                    frameNumber);
847    // Acquire all request buffers first
848    int blob_request = 0;
849    for (size_t i = 0; i < request->num_output_buffers; i++) {
850        const camera3_stream_buffer_t& output = request->output_buffers[i];
851        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
852        sp<Fence> acquireFence = new Fence(output.acquire_fence);
853
854        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
855            //Call function to store local copy of jpeg data for encode params.
856            blob_request = 1;
857            rc = getJpegSettings(request->settings);
858            if (rc < 0) {
859                ALOGE("%s: failed to get jpeg parameters", __func__);
860                pthread_mutex_unlock(&mMutex);
861                return rc;
862            }
863        }
864
865        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
866        if (rc != OK) {
867            ALOGE("%s: fence wait failed %d", __func__, rc);
868            pthread_mutex_unlock(&mMutex);
869            return rc;
870        }
871        streamTypeMask |= channel->getStreamTypeMask();
872    }
873
874    rc = setFrameParameters(request->frame_number, request->settings, streamTypeMask);
875    if (rc < 0) {
876        ALOGE("%s: fail to set frame parameters", __func__);
877        pthread_mutex_unlock(&mMutex);
878        return rc;
879    }
880
881    /* Update pending request list and pending buffers map */
882    PendingRequestInfo pendingRequest;
883    pendingRequest.frame_number = frameNumber;
884    pendingRequest.num_buffers = request->num_output_buffers;
885    pendingRequest.request_id = request_id;
886    pendingRequest.blob_request = blob_request;
887
888    for (size_t i = 0; i < request->num_output_buffers; i++) {
889        RequestedBufferInfo requestedBuf;
890        requestedBuf.stream = request->output_buffers[i].stream;
891        requestedBuf.buffer = NULL;
892        pendingRequest.buffers.push_back(requestedBuf);
893
894        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
895    }
896    mPendingRequestsList.push_back(pendingRequest);
897
898    // Notify metadata channel we receive a request
899    mMetadataChannel->request(NULL, frameNumber);
900
901    // Call request on other streams
902    for (size_t i = 0; i < request->num_output_buffers; i++) {
903        const camera3_stream_buffer_t& output = request->output_buffers[i];
904        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
905        mm_camera_buf_def_t *pInputBuffer = NULL;
906
907        if (channel == NULL) {
908            ALOGE("%s: invalid channel pointer for stream", __func__);
909            continue;
910        }
911
912        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
913            QCamera3RegularChannel* inputChannel = NULL;
914            if(request->input_buffer != NULL){
915
916                //Try to get the internal format
917                inputChannel = (QCamera3RegularChannel*)
918                    request->input_buffer->stream->priv;
919                if(inputChannel == NULL ){
920                    ALOGE("%s: failed to get input channel handle", __func__);
921                } else {
922                    pInputBuffer =
923                        inputChannel->getInternalFormatBuffer(
924                                request->input_buffer->buffer);
925                    ALOGD("%s: Input buffer dump",__func__);
926                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
927                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
928                    ALOGD("frame len:%d", pInputBuffer->frame_len);
929                }
930            }
931            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
932                            pInputBuffer,(QCamera3Channel*)inputChannel);
933        } else {
934            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
935                __LINE__, output.buffer, frameNumber);
936            rc = channel->request(output.buffer, frameNumber);
937        }
938        if (rc < 0)
939            ALOGE("%s: request failed", __func__);
940    }
941
942    mFirstRequest = false;
943
944    //Block on conditional variable
945    mPendingRequest = 1;
946    while (mPendingRequest == 1) {
947        pthread_cond_wait(&mRequestCond, &mMutex);
948    }
949
950    pthread_mutex_unlock(&mMutex);
951    return rc;
952}
953
954/*===========================================================================
955 * FUNCTION   : getMetadataVendorTagOps
956 *
957 * DESCRIPTION:
958 *
959 * PARAMETERS :
960 *
961 *
962 * RETURN     :
963 *==========================================================================*/
964void QCamera3HardwareInterface::getMetadataVendorTagOps(
965                    vendor_tag_query_ops_t* /*ops*/)
966{
967    /* Enable locks when we eventually add Vendor Tags */
968    /*
969    pthread_mutex_lock(&mMutex);
970
971    pthread_mutex_unlock(&mMutex);
972    */
973    return;
974}
975
976/*===========================================================================
977 * FUNCTION   : dump
978 *
979 * DESCRIPTION:
980 *
981 * PARAMETERS :
982 *
983 *
984 * RETURN     :
985 *==========================================================================*/
986void QCamera3HardwareInterface::dump(int /*fd*/)
987{
988    /*Enable lock when we implement this function*/
989    /*
990    pthread_mutex_lock(&mMutex);
991
992    pthread_mutex_unlock(&mMutex);
993    */
994    return;
995}
996
997
998/*===========================================================================
999 * FUNCTION   : captureResultCb
1000 *
1001 * DESCRIPTION: Callback handler for all capture result
1002 *              (streams, as well as metadata)
1003 *
1004 * PARAMETERS :
1005 *   @metadata : metadata information
1006 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1007 *               NULL if metadata.
1008 *
1009 * RETURN     : NONE
1010 *==========================================================================*/
1011void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1012                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1013{
1014    pthread_mutex_lock(&mMutex);
1015
1016    if (metadata_buf) {
1017        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1018        int32_t frame_number_valid = *(int32_t *)
1019            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1020        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1021            CAM_INTF_META_PENDING_REQUESTS, metadata);
1022        uint32_t frame_number = *(uint32_t *)
1023            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1024        const struct timeval *tv = (const struct timeval *)
1025            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1026        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1027            tv->tv_usec * NSEC_PER_USEC;
1028
1029        if (!frame_number_valid) {
1030            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1031            mMetadataChannel->bufDone(metadata_buf);
1032            goto done_metadata;
1033        }
1034        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1035                frame_number, capture_time);
1036
1037        // Go through the pending requests info and send shutter/results to frameworks
1038        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1039                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1040            camera3_capture_result_t result;
1041            camera3_notify_msg_t notify_msg;
1042            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1043
1044            // Flush out all entries with less or equal frame numbers.
1045
1046            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1047            //Right now it's the same as metadata timestamp
1048
1049            //TODO: When there is metadata drop, how do we derive the timestamp of
1050            //dropped frames? For now, we fake the dropped timestamp by subtracting
1051            //from the reported timestamp
1052            nsecs_t current_capture_time = capture_time -
1053                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1054
1055            // Send shutter notify to frameworks
1056            notify_msg.type = CAMERA3_MSG_SHUTTER;
1057            notify_msg.message.shutter.frame_number = i->frame_number;
1058            notify_msg.message.shutter.timestamp = current_capture_time;
1059            mCallbackOps->notify(mCallbackOps, &notify_msg);
1060            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1061                    i->frame_number, capture_time);
1062
1063            // Send empty metadata with already filled buffers for dropped metadata
1064            // and send valid metadata with already filled buffers for current metadata
1065            if (i->frame_number < frame_number) {
1066                CameraMetadata dummyMetadata;
1067                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1068                        &current_capture_time, 1);
1069                dummyMetadata.update(ANDROID_REQUEST_ID,
1070                        &(i->request_id), 1);
1071                result.result = dummyMetadata.release();
1072            } else {
1073                result.result = translateCbMetadataToResultMetadata(metadata,
1074                        current_capture_time, i->request_id);
1075                if (i->blob_request) {
1076                   //If it is a blob request then send the metadata to the picture channel
1077                   mPictureChannel->queueMetadata(metadata_buf);
1078
1079                } else {
1080                   // Return metadata buffer
1081                   mMetadataChannel->bufDone(metadata_buf);
1082                   free(metadata_buf);
1083                }
1084            }
1085            if (!result.result) {
1086                ALOGE("%s: metadata is NULL", __func__);
1087            }
1088            result.frame_number = i->frame_number;
1089            result.num_output_buffers = 0;
1090            result.output_buffers = NULL;
1091            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1092                    j != i->buffers.end(); j++) {
1093                if (j->buffer) {
1094                    result.num_output_buffers++;
1095                }
1096            }
1097
1098            if (result.num_output_buffers > 0) {
1099                camera3_stream_buffer_t *result_buffers =
1100                    new camera3_stream_buffer_t[result.num_output_buffers];
1101                if (!result_buffers) {
1102                    ALOGE("%s: Fatal error: out of memory", __func__);
1103                }
1104                size_t result_buffers_idx = 0;
1105                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1106                        j != i->buffers.end(); j++) {
1107                    if (j->buffer) {
1108                        result_buffers[result_buffers_idx++] = *(j->buffer);
1109                        free(j->buffer);
1110                        j->buffer = NULL;
1111                        mPendingBuffersMap.editValueFor(j->stream)--;
1112                    }
1113                }
1114                result.output_buffers = result_buffers;
1115
1116                mCallbackOps->process_capture_result(mCallbackOps, &result);
1117                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1118                        __func__, result.frame_number, current_capture_time);
1119                free_camera_metadata((camera_metadata_t *)result.result);
1120                delete[] result_buffers;
1121            } else {
1122                mCallbackOps->process_capture_result(mCallbackOps, &result);
1123                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1124                        __func__, result.frame_number, current_capture_time);
1125                free_camera_metadata((camera_metadata_t *)result.result);
1126            }
1127            // erase the element from the list
1128            i = mPendingRequestsList.erase(i);
1129        }
1130
1131
1132done_metadata:
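        // Keep process_capture_request blocked while any stream has max_buffers
        // outstanding in the HAL or the backend reports pending requests.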
1133        bool max_buffers_dequeued = false;
1134        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1135            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1136            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1137            if (queued_buffers == stream->max_buffers) {
1138                max_buffers_dequeued = true;
1139                break;
1140            }
1141        }
1142        if (!max_buffers_dequeued && !pending_requests) {
1143            // Unblock process_capture_request
1144            mPendingRequest = 0;
1145            pthread_cond_signal(&mRequestCond);
1146        }
1147    } else {
1148        // If the frame number doesn't exist in the pending request list,
1149        // directly send the buffer to the frameworks, and update pending buffers map
1150        // Otherwise, book-keep the buffer.
1151        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1152        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1153            i++;
1154        }
1155        if (i == mPendingRequestsList.end()) {
1156            // Verify all pending requests frame_numbers are greater
1157            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1158                    j != mPendingRequestsList.end(); j++) {
1159                if (j->frame_number < frame_number) {
1160                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1161                            __func__, j->frame_number, frame_number);
1162                }
1163            }
1164            camera3_capture_result_t result;
1165            result.result = NULL;
1166            result.frame_number = frame_number;
1167            result.num_output_buffers = 1;
1168            result.output_buffers = buffer;
1169            ALOGV("%s: result frame_number = %d, buffer = %p",
1170                    __func__, frame_number, buffer);
1171            mPendingBuffersMap.editValueFor(buffer->stream)--;
1172            mCallbackOps->process_capture_result(mCallbackOps, &result);
1173        } else {
1174            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1175                    j != i->buffers.end(); j++) {
1176                if (j->stream == buffer->stream) {
1177                    if (j->buffer != NULL) {
1178                        ALOGE("%s: Error: buffer is already set", __func__);
1179                    } else {
1180                        j->buffer = (camera3_stream_buffer_t *)malloc(
1181                                sizeof(camera3_stream_buffer_t));
1182                        *(j->buffer) = *buffer;
1183                        ALOGV("%s: cache buffer %p at result frame_number %d",
1184                                __func__, buffer, frame_number);
1185                    }
1186                }
1187            }
1188        }
1189    }
1190    pthread_mutex_unlock(&mMutex);
1191    return;
1192}
1193
1194/*===========================================================================
1195 * FUNCTION   : translateCbMetadataToResultMetadata
1196 *
1197 * DESCRIPTION:
1198 *
1199 * PARAMETERS :
1200 *   @metadata : metadata information from callback
1201 *
1202 * RETURN     : camera_metadata_t*
1203 *              metadata in a format specified by fwk
1204 *==========================================================================*/
1205camera_metadata_t*
1206QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1207                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1208                                 int32_t request_id)
1209{
1210    CameraMetadata camMetadata;
1211    camera_metadata_t* resultMetadata;
1212
1213    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1214    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1215
1216    /*CAM_INTF_META_HISTOGRAM - TODO*/
1217    /*cam_hist_stats_t  *histogram =
1218      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1219      metadata);*/
1220
1221    /*face detection*/
1222    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1223        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1224    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1225    int32_t faceIds[numFaces];
1226    uint8_t faceScores[numFaces];
1227    int32_t faceRectangles[numFaces * 4];
1228    int32_t faceLandmarks[numFaces * 6];
1229    int j = 0, k = 0;
1230    for (int i = 0; i < numFaces; i++) {
1231        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1232        faceScores[i] = faceDetectionInfo->faces[i].score;
1233        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1234                faceRectangles+j, -1);
1235        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1236        j+= 4;
1237        k+= 6;
1238    }
1239    if (numFaces > 0) {
1240        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1241        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1242        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1243            faceRectangles, numFaces*4);
1244        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1245            faceLandmarks, numFaces*6);
1246    }
1247
1248    uint8_t  *color_correct_mode =
1249        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1250    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1251
1252    int32_t  *ae_precapture_id =
1253        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1254    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1255
1256    /*aec regions*/
1257    cam_area_t  *hAeRegions =
1258        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1259    int32_t aeRegions[5];
1260    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1261    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1262    if(mIsZslMode) {
1263        uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
1264        camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
1265    } else {
1266        uint8_t *ae_state =
1267            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1268        camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1269    }
1270    uint8_t  *focusMode =
1271        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1272    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1273
1274    /*af regions*/
1275    cam_area_t  *hAfRegions =
1276        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1277    int32_t afRegions[5];
1278    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1279    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1280
1281    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1282    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1283
1284    int32_t  *afTriggerId =
1285        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1286    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1287
1288    uint8_t  *whiteBalance =
1289        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1290    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1291
1292    /*awb regions*/
1293    cam_area_t  *hAwbRegions =
1294        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1295    int32_t awbRegions[5];
1296    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1297    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1298
1299    uint8_t  *whiteBalanceState =
1300        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1301    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1302
1303    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1304    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1305
1306    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1307    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1308
1309    uint8_t  *flashPower =
1310        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1311    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1312
1313    int64_t  *flashFiringTime =
1314        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1315    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1316
1317    /*int32_t  *ledMode =
1318      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1319      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1320
1321    uint8_t  *flashState =
1322        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1323    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1324
1325    uint8_t  *hotPixelMode =
1326        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1327    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1328
1329    float  *lensAperture =
1330        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1331    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1332
1333    float  *filterDensity =
1334        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1335    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1336
1337    float  *focalLength =
1338        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1339    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1340
1341    float  *focusDistance =
1342        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1343    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1344
1345    float  *focusRange =
1346        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1347    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1348
1349    uint8_t  *opticalStab =
1350        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1351    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1352
1353    /*int32_t  *focusState =
1354      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1355      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1356
1357    uint8_t  *noiseRedMode =
1358        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1359    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1360
1361    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1362
1363    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1364        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1365    int32_t scalerCropRegion[4];
1366    scalerCropRegion[0] = hScalerCropRegion->left;
1367    scalerCropRegion[1] = hScalerCropRegion->top;
1368    scalerCropRegion[2] = hScalerCropRegion->width;
1369    scalerCropRegion[3] = hScalerCropRegion->height;
1370    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1371
1372    int64_t  *sensorExpTime =
1373        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1374    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1375
1376    int64_t  *sensorFrameDuration =
1377        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1378    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
1379
1380    int32_t  *sensorSensitivity =
1381        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1382    mMetadataResponse.iso_speed = *sensorSensitivity;
1383    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1384
1385    uint8_t  *shadingMode =
1386        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1387    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1388
1389    uint8_t  *faceDetectMode =
1390        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1391    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
1392
1393    uint8_t  *histogramMode =
1394        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1395    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1396
1397    uint8_t  *sharpnessMapMode =
1398        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1399    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1400            sharpnessMapMode, 1);
1401
1402    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1403    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1404        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1405    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1406            (int32_t*)sharpnessMap->sharpness,
1407            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1408
1409    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1410        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1411    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1412    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1413    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1414                       (float*)lensShadingMap->lens_shading,
1415                       4*map_width*map_height);
1416
1417    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1418        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1419    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1420
1421    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1422        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1423    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1424                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1425
1426    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1427        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1428    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1429                       predColorCorrectionGains->gains, 4);
1430
1431    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1432        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1433    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1434                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1435
1436    uint8_t *blackLevelLock = (uint8_t*)
1437        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1438    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1439
1440    uint8_t *sceneFlicker = (uint8_t*)
1441        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1442    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1443
1444
1445    resultMetadata = camMetadata.release();
1446    return resultMetadata;
1447}
1448
1449/*===========================================================================
1450 * FUNCTION   : convertToRegions
1451 *
1452 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1453 *
1454 * PARAMETERS :
1455 *   @rect   : cam_rect_t struct to convert
1456 *   @region : int32_t destination array
1457 *   @weight : if we are converting from cam_area_t, weight is valid
1458 *             else weight = -1
1459 *
1460 *==========================================================================*/
1461void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1462    region[0] = rect.left;
1463    region[1] = rect.top;
1464    region[2] = rect.left + rect.width;
1465    region[3] = rect.top + rect.height;
1466    if (weight > -1) {
1467        region[4] = weight;
1468    }
1469}
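
/* Illustrative example (not part of the original code): assuming cam_rect_t
 * is laid out as {left, top, width, height}, a 100x100 ROI anchored at
 * (10, 20) with weight 1 becomes the framework layout
 * [x_min, y_min, x_max, y_max, weight]:
 *
 *   cam_rect_t r = {10, 20, 100, 100};
 *   int32_t region[5];
 *   convertToRegions(r, region, 1);   // region = {10, 20, 110, 120, 1}
 */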
1470
1471/*===========================================================================
1472 * FUNCTION   : convertFromRegions
1473 *
1474 * DESCRIPTION: helper method to convert a framework region array into a
1475 *              cam_area_t
1476 * PARAMETERS :
1477 *   @roi      : destination cam_area_t to fill
1478 *   @settings : frame settings from the framework containing the region
1479 *   @tag      : metadata tag of the region entry, laid out as
1480 *               [x_min, y_min, x_max, y_max, weight]
1481 *
1482 *==========================================================================*/
1483void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1484                                                   const camera_metadata_t *settings,
1485                                                   uint32_t tag){
1486    CameraMetadata frame_settings;
1487    frame_settings = settings;
1488    int32_t x_min = frame_settings.find(tag).data.i32[0];
1489    int32_t y_min = frame_settings.find(tag).data.i32[1];
1490    int32_t x_max = frame_settings.find(tag).data.i32[2];
1491    int32_t y_max = frame_settings.find(tag).data.i32[3];
1492    roi->weight = frame_settings.find(tag).data.i32[4];
1493    roi->rect.left = x_min;
1494    roi->rect.top = y_min;
1495    roi->rect.width = x_max - x_min;
1496    roi->rect.height = y_max - y_min;
1497}
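
/* Illustrative sketch (not part of the original code): reading a requested AF
 * region back into the HAL representation. The 5-element layout
 * [x_min, y_min, x_max, y_max, weight] mirrors what convertToRegions() above
 * produces; "settings" stands for the per-request metadata from the framework.
 *
 *   cam_area_t roi;
 *   convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
 *   // roi.rect.{left,top,width,height} and roi.weight are now populated
 */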
1498
1499/*===========================================================================
1500 * FUNCTION   : resetIfNeededROI
1501 *
1502 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1503 *              crop region
1504 *
1505 * PARAMETERS :
1506 *   @roi       : cam_area_t struct to resize
1507 *   @scalerCropRegion : cam_crop_region_t region to compare against
1508 *
1509 *
1510 *==========================================================================*/
1511bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1512                                                 const cam_crop_region_t* scalerCropRegion)
1513{
1514    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1515    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1516    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
1517    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
1518    if ((roi_x_max < scalerCropRegion->left) ||
1519        (roi_y_max < scalerCropRegion->top)  ||
1520        (roi->rect.left > crop_x_max) ||
1521        (roi->rect.top > crop_y_max)){
1522        return false;
1523    }
1524    if (roi->rect.left < scalerCropRegion->left) {
1525        roi->rect.left = scalerCropRegion->left;
1526    }
1527    if (roi->rect.top < scalerCropRegion->top) {
1528        roi->rect.top = scalerCropRegion->top;
1529    }
1530    if (roi_x_max > crop_x_max) {
1531        roi_x_max = crop_x_max;
1532    }
1533    if (roi_y_max > crop_y_max) {
1534        roi_y_max = crop_y_max;
1535    }
1536    roi->rect.width = roi_x_max - roi->rect.left;
1537    roi->rect.height = roi_y_max - roi->rect.top;
1538    return true;
1539}
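
/* Worked example (illustrative): with a scaler crop of (left=100, top=100,
 * width=1000, height=800), an ROI of (left=50, top=50, 200x200) overlaps the
 * crop, so resetIfNeededROI() returns true and clips it to
 * (left=100, top=100, 150x150). An ROI lying entirely outside the crop
 * region returns false and is left untouched.
 */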
1540
1541/*===========================================================================
1542 * FUNCTION   : convertLandmarks
1543 *
1544 * DESCRIPTION: helper method to extract the landmarks from face detection info
1545 *
1546 * PARAMETERS :
1547 *   @face   : cam_face_detection_info_t struct holding the detected landmarks
1548 *   @landmarks : int32_t destination array
1549 *
1550 *
1551 *==========================================================================*/
1552void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1553{
1554    landmarks[0] = face.left_eye_center.x;
1555    landmarks[1] = face.left_eye_center.y;
1556    landmarks[2] = face.right_eye_center.x;
1557    landmarks[3] = face.right_eye_center.y;
1558    landmarks[4] = face.mouth_center.x;
1559    landmarks[5] = face.mouth_center.y;
1560}
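
/* The destination array is expected to hold at least six int32_t values,
 * ordered [left_eye.x, left_eye.y, right_eye.x, right_eye.y, mouth.x,
 * mouth.y], e.g. (illustrative, "face" being one cam_face_detection_info_t
 * entry from the face-detect metadata):
 *
 *   int32_t landmarks[6];
 *   convertLandmarks(face, landmarks);
 */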
1561
1562#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1563/*===========================================================================
1564 * FUNCTION   : initCapabilities
1565 *
1566 * DESCRIPTION: initialize camera capabilities in static data struct
1567 *
1568 * PARAMETERS :
1569 *   @cameraId  : camera Id
1570 *
1571 * RETURN     : int32_t type of status
1572 *              NO_ERROR  -- success
1573 *              non-zero failure code
1574 *==========================================================================*/
1575int QCamera3HardwareInterface::initCapabilities(int cameraId)
1576{
1577    int rc = 0;
1578    mm_camera_vtbl_t *cameraHandle = NULL;
1579    QCamera3HeapMemory *capabilityHeap = NULL;
1580
1581    cameraHandle = camera_open(cameraId);
1582    if (!cameraHandle) {
1583        ALOGE("%s: camera_open failed", __func__);
1584        rc = -1;
1585        goto open_failed;
1586    }
1587
1588    capabilityHeap = new QCamera3HeapMemory();
1589    if (capabilityHeap == NULL) {
1590        ALOGE("%s: creation of capabilityHeap failed", __func__);
1591        goto heap_creation_failed;
1592    }
1593    /* Allocate memory for capability buffer */
1594    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1595    if(rc != OK) {
1596        ALOGE("%s: No memory for cappability", __func__);
1597        goto allocate_failed;
1598    }
1599
1600    /* Map memory for capability buffer */
1601    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1602    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1603                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1604                                capabilityHeap->getFd(0),
1605                                sizeof(cam_capability_t));
1606    if(rc < 0) {
1607        ALOGE("%s: failed to map capability buffer", __func__);
1608        goto map_failed;
1609    }
1610
1611    /* Query Capability */
1612    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1613    if(rc < 0) {
1614        ALOGE("%s: failed to query capability",__func__);
1615        goto query_failed;
1616    }
1617    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1618    if (!gCamCapability[cameraId]) {
1619        ALOGE("%s: out of memory", __func__);
1620        goto query_failed;
1621    }
1622    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1623                                        sizeof(cam_capability_t));
1624    rc = 0;
1625
1626query_failed:
1627    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1628                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1629map_failed:
1630    capabilityHeap->deallocate();
1631allocate_failed:
1632    delete capabilityHeap;
1633heap_creation_failed:
1634    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1635    cameraHandle = NULL;
1636open_failed:
1637    return rc;
1638}
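
/* Note on the error path above: the labels unwind in reverse order of
 * acquisition (query_failed -> unmap, map_failed -> deallocate,
 * allocate_failed -> delete heap, heap_creation_failed -> close camera), so
 * each failure point jumps to the first label that releases everything
 * acquired so far. A successful run deliberately falls through the same
 * labels, which is why rc is reset to 0 just before them.
 */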
1639
1640/*===========================================================================
1641 * FUNCTION   : initParameters
1642 *
1643 * DESCRIPTION: initialize camera parameters
1644 *
1645 * PARAMETERS :
1646 *
1647 * RETURN     : int32_t type of status
1648 *              NO_ERROR  -- success
1649 *              non-zero failure code
1650 *==========================================================================*/
1651int QCamera3HardwareInterface::initParameters()
1652{
1653    int rc = 0;
1654
1655    //Allocate Set Param Buffer
1656    mParamHeap = new QCamera3HeapMemory();
1657    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1658    if(rc != OK) {
1659        rc = NO_MEMORY;
1660        ALOGE("Failed to allocate SETPARM Heap memory");
1661        delete mParamHeap;
1662        mParamHeap = NULL;
1663        return rc;
1664    }
1665
1666    //Map memory for parameters buffer
1667    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1668            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1669            mParamHeap->getFd(0),
1670            sizeof(parm_buffer_t));
1671    if(rc < 0) {
1672        ALOGE("%s:failed to map SETPARM buffer",__func__);
1673        rc = FAILED_TRANSACTION;
1674        mParamHeap->deallocate();
1675        delete mParamHeap;
1676        mParamHeap = NULL;
1677        return rc;
1678    }
1679
1680    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1681    return rc;
1682}
1683
1684/*===========================================================================
1685 * FUNCTION   : deinitParameters
1686 *
1687 * DESCRIPTION: de-initialize camera parameters
1688 *
1689 * PARAMETERS :
1690 *
1691 * RETURN     : NONE
1692 *==========================================================================*/
1693void QCamera3HardwareInterface::deinitParameters()
1694{
1695    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1696            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1697
1698    mParamHeap->deallocate();
1699    delete mParamHeap;
1700    mParamHeap = NULL;
1701
1702    mParameters = NULL;
1703}
1704
1705/*===========================================================================
1706 * FUNCTION   : calcMaxJpegSize
1707 *
1708 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1709 *
1710 * PARAMETERS :
1711 *
1712 * RETURN     : max_jpeg_size
1713 *==========================================================================*/
1714int QCamera3HardwareInterface::calcMaxJpegSize()
1715{
1716    int32_t max_jpeg_size = 0;
1717    int temp_width, temp_height;
1718    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1719        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1720        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1721        if (temp_width * temp_height > max_jpeg_size ) {
1722            max_jpeg_size = temp_width * temp_height;
1723        }
1724    }
1725    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1726    return max_jpeg_size;
1727}
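
/* Worked example (illustrative, assuming a largest picture size of
 * 4160x3120): max_jpeg_size = 4160 * 3120 * 3/2 + sizeof(camera3_jpeg_blob_t)
 * = 19468800 bytes plus the blob trailer, i.e. the worst-case JPEG buffer is
 * sized as if the image were an uncompressed YUV420 frame.
 */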
1728
1729/*===========================================================================
1730 * FUNCTION   : initStaticMetadata
1731 *
1732 * DESCRIPTION: initialize the static metadata
1733 *
1734 * PARAMETERS :
1735 *   @cameraId  : camera Id
1736 *
1737 * RETURN     : int32_t type of status
1738 *              0  -- success
1739 *              non-zero failure code
1740 *==========================================================================*/
1741int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1742{
1743    int rc = 0;
1744    CameraMetadata staticInfo;
1745
1746    /* android.info: hardware level */
1747    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
1748    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1749        &supportedHardwareLevel, 1);
1750
1751    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1752    /*HAL 3 only*/
1753    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1754                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1755
1756    /*hard coded for now but this should come from sensor*/
1757    float min_focus_distance;
1758    if(facingBack){
1759        min_focus_distance = 10;
1760    } else {
1761        min_focus_distance = 0;
1762    }
1763    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1764                    &min_focus_distance, 1);
1765
1766    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1767                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1768
1769    /*should be using focal lengths but sensor doesn't provide that info now*/
1770    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1771                      &gCamCapability[cameraId]->focal_length,
1772                      1);
1773
1774    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1775                      gCamCapability[cameraId]->apertures,
1776                      gCamCapability[cameraId]->apertures_count);
1777
1778    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1779                gCamCapability[cameraId]->filter_densities,
1780                gCamCapability[cameraId]->filter_densities_count);
1781
1782
1783    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1784                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1785                      gCamCapability[cameraId]->optical_stab_modes_count);
1786
1787    staticInfo.update(ANDROID_LENS_POSITION,
1788                      gCamCapability[cameraId]->lens_position,
1789                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1790
1791    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1792                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1793    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1794                      lens_shading_map_size,
1795                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1796
1797    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1798                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1799    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1800            geo_correction_map_size,
1801            sizeof(geo_correction_map_size)/sizeof(int32_t));
1802
1803    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1804                       gCamCapability[cameraId]->geo_correction_map,
1805                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1806
1807    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1808            gCamCapability[cameraId]->sensor_physical_size, 2);
1809
1810    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1811            gCamCapability[cameraId]->exposure_time_range, 2);
1812
1813    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1814            &gCamCapability[cameraId]->max_frame_duration, 1);
1815
1816
1817    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1818                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1819
1820    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1821                                               gCamCapability[cameraId]->pixel_array_size.height};
1822    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1823                      pixel_array_size, 2);
1824
1825    int32_t active_array_size[] = {0, 0,
1826                                                gCamCapability[cameraId]->active_array_size.width,
1827                                                gCamCapability[cameraId]->active_array_size.height};
1828    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1829                      active_array_size, 4);
1830
1831    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1832            &gCamCapability[cameraId]->white_level, 1);
1833
1834    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1835            gCamCapability[cameraId]->black_level_pattern, 4);
1836
1837    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1838                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1839
1840    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1841                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1842
1843    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1844                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1845    /*hardcode 0 for now*/
1846    int32_t max_face_count = 0;
1847    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1848                      &max_face_count, 1);
1849
1850    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1851                      &gCamCapability[cameraId]->histogram_size, 1);
1852
1853    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1854            &gCamCapability[cameraId]->max_histogram_count, 1);
1855
1856    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1857                                                gCamCapability[cameraId]->sharpness_map_size.height};
1858
1859    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1860            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1861
1862    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1863            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1864
1865
1866    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1867                      &gCamCapability[cameraId]->raw_min_duration,
1868                       1);
1869
1870    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
1871                                                HAL_PIXEL_FORMAT_BLOB};
1872    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
1873    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1874                      scalar_formats,
1875                      scalar_formats_count);
1876
1877    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1878    makeTable(gCamCapability[cameraId]->supported_sizes_tbl,
1879              gCamCapability[cameraId]->supported_sizes_tbl_cnt,
1880              available_processed_sizes);
1881    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1882                available_processed_sizes,
1883                (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2);
1884    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1885                      &gCamCapability[cameraId]->min_duration[0],
1886                      gCamCapability[cameraId]->supported_sizes_tbl_cnt);
1887
1888    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1889    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1890                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1891                 available_fps_ranges);
1892    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1893            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1894
1895    camera_metadata_rational exposureCompensationStep = {
1896            gCamCapability[cameraId]->exp_compensation_step.numerator,
1897            gCamCapability[cameraId]->exp_compensation_step.denominator};
1898    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1899                      &exposureCompensationStep, 1);
1900
1901    /*TO DO*/
1902    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1903    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1904                      availableVstabModes, sizeof(availableVstabModes));
1905
1906    /*HAL 1 and HAL 3 common*/
1907    float maxZoom = 4;
1908    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1909            &maxZoom, 1);
1910
1911    int32_t max3aRegions = 1;
1912    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1913            &max3aRegions, 1);
1914
1915    uint8_t availableFaceDetectModes[] = {
1916            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1917    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1918                      availableFaceDetectModes,
1919                      sizeof(availableFaceDetectModes));
1920
1921    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1922                                       gCamCapability[cameraId]->raw_dim.height};
1923    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1924                      raw_size,
1925                      sizeof(raw_size)/sizeof(uint32_t));
1926
1927    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1928                                                        gCamCapability[cameraId]->exposure_compensation_max};
1929    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1930            exposureCompensationRange,
1931            sizeof(exposureCompensationRange)/sizeof(int32_t));
1932
1933    uint8_t lensFacing = (facingBack) ?
1934            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1935    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1936
1937    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
1938    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1939              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1940              available_jpeg_sizes);
1941    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1942                available_jpeg_sizes,
1943                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1944
1945    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1946                      available_thumbnail_sizes,
1947                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1948
1949    int32_t max_jpeg_size = 0;
1950    int temp_width, temp_height;
1951    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1952        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1953        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1954        if (temp_width * temp_height > max_jpeg_size ) {
1955            max_jpeg_size = temp_width * temp_height;
1956        }
1957    }
1958    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1959    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1960                      &max_jpeg_size, 1);
1961
1962    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1963    int32_t size = 0;
1964    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1965        int val = lookupFwkName(EFFECT_MODES_MAP,
1966                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1967                                   gCamCapability[cameraId]->supported_effects[i]);
1968        if (val != NAME_NOT_FOUND) {
1969            avail_effects[size] = (uint8_t)val;
1970            size++;
1971        }
1972    }
1973    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1974                      avail_effects,
1975                      size);
1976
1977    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1978    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1979    int32_t supported_scene_modes_cnt = 0;
1980    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1981        int val = lookupFwkName(SCENE_MODES_MAP,
1982                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1983                                gCamCapability[cameraId]->supported_scene_modes[i]);
1984        if (val != NAME_NOT_FOUND) {
1985            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1986            supported_indexes[supported_scene_modes_cnt] = i;
1987            supported_scene_modes_cnt++;
1988        }
1989    }
1990
1991    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1992                      avail_scene_modes,
1993                      supported_scene_modes_cnt);
1994
1995    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1996    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1997                      supported_scene_modes_cnt,
1998                      scene_mode_overrides,
1999                      supported_indexes,
2000                      cameraId);
2001    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2002                      scene_mode_overrides,
2003                      supported_scene_modes_cnt*3);
2004
2005    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2006    size = 0;
2007    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2008        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2009                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2010                                 gCamCapability[cameraId]->supported_antibandings[i]);
2011        if (val != NAME_NOT_FOUND) {
2012            avail_antibanding_modes[size] = (uint8_t)val;
2013            size++;
2014        }
2015
2016    }
2017    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2018                      avail_antibanding_modes,
2019                      size);
2020
2021    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2022    size = 0;
2023    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2024        int val = lookupFwkName(FOCUS_MODES_MAP,
2025                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2026                                gCamCapability[cameraId]->supported_focus_modes[i]);
2027        if (val != NAME_NOT_FOUND) {
2028            avail_af_modes[size] = (uint8_t)val;
2029            size++;
2030        }
2031    }
2032    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2033                      avail_af_modes,
2034                      size);
2035
2036    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2037    size = 0;
2038    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2039        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2040                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2041                                    gCamCapability[cameraId]->supported_white_balances[i]);
2042        if (val != NAME_NOT_FOUND) {
2043            avail_awb_modes[size] = (uint8_t)val;
2044            size++;
2045        }
2046    }
2047    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2048                      avail_awb_modes,
2049                      size);
2050
2051    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2052    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2053      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2054
2055    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2056            available_flash_levels,
2057            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2058
2059    uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX];
2060    size = 0;
2061    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) {
2062        int val = lookupFwkName(FLASH_MODES_MAP,
2063                                sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
2064                                gCamCapability[cameraId]->supported_flash_modes[i]);
2065        if (val != NAME_NOT_FOUND) {
2066            avail_flash_modes[size] = (uint8_t)val;
2067            size++;
2068        }
2069    }
2070    static uint8_t flashAvailable = 0;
2071    if (size > 1) {
2072        //flash is supported
2073        flashAvailable = 1;
2074    }
2075    staticInfo.update(ANDROID_FLASH_MODE,
2076                      avail_flash_modes,
2077                      size);
2078
2079    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2080            &flashAvailable, 1);
2081
2082    uint8_t avail_ae_modes[5];
2083    size = 0;
2084    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2085        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2086        size++;
2087    }
2088    if (flashAvailable) {
2089        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2090        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2091        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2092    }
2093    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2094                      avail_ae_modes,
2095                      size);
2096
2097    int32_t sensitivity_range[2];
2098    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2099    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2100    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2101                      sensitivity_range,
2102                      sizeof(sensitivity_range) / sizeof(int32_t));
2103
2104    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2105                      &gCamCapability[cameraId]->max_analog_sensitivity,
2106                      1);
2107    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2108                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2109                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2110
2111    gStaticMetadata[cameraId] = staticInfo.release();
2112    return rc;
2113}
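
/* Illustrative only (not part of the original code): once
 * gStaticMetadata[cameraId] is populated it can be inspected through the
 * CameraMetadata wrapper, e.g.
 *
 *   CameraMetadata info(clone_camera_metadata(gStaticMetadata[cameraId]));
 *   camera_metadata_entry e = info.find(ANDROID_JPEG_MAX_SIZE);
 *   // e.data.i32[0] holds the value computed in the loop above
 *
 * (cloned because the CameraMetadata constructor takes ownership of the
 * buffer it wraps).
 */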
2114
2115/*===========================================================================
2116 * FUNCTION   : makeTable
2117 *
2118 * DESCRIPTION: make a table of sizes
2119 *
2120 * PARAMETERS :
2121 *   @dimTable, @size : source table of cam_dimension_t entries and its length
2122 *   @sizeTable       : destination array, filled as [width, height] pairs
2123 *==========================================================================*/
2124void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2125                                          int32_t* sizeTable)
2126{
2127    int j = 0;
2128    for (int i = 0; i < size; i++) {
2129        sizeTable[j] = dimTable[i].width;
2130        sizeTable[j+1] = dimTable[i].height;
2131        j+=2;
2132    }
2133}
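
/* Example (illustrative): a dimension table of {1920x1080, 1280x720} is
 * flattened into sizeTable = {1920, 1080, 1280, 720}, the
 * [width, height, width, height, ...] layout expected by entries such as
 * ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES above.
 */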
2134
2135/*===========================================================================
2136 * FUNCTION   : makeFPSTable
2137 *
2138 * DESCRIPTION: make a table of fps ranges
2139 *
2140 * PARAMETERS :
2141 *   @fpsTable, @size, @fpsRangesTable : source fps ranges, count, and flattened [min, max] output
2142 *==========================================================================*/
2143void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2144                                          int32_t* fpsRangesTable)
2145{
2146    int j = 0;
2147    for (int i = 0; i < size; i++) {
2148        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2149        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2150        j+=2;
2151    }
2152}
2153
2154/*===========================================================================
2155 * FUNCTION   : makeOverridesList
2156 *
2157 * DESCRIPTION: make a list of scene mode overrides
2158 *
2159 * PARAMETERS :
2160 *   @overridesTable, @size, @supported_indexes, @camera_id : daemon override
2161 *   table and scene-mode selection info; @overridesList : output (ae,awb,af) triplets
2162 *==========================================================================*/
2163void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2164                                                  uint8_t size, uint8_t* overridesList,
2165                                                  uint8_t* supported_indexes,
2166                                                  int camera_id)
2167{
2168    /*daemon will give a list of overrides for all scene modes.
2169      However we should send the fwk only the overrides for the scene modes
2170      supported by the framework*/
2171    int j = 0, index = 0, supt = 0;
2172    uint8_t focus_override;
2173    for (int i = 0; i < size; i++) {
2174        supt = 0;
2175        index = supported_indexes[i];
2176        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
2177        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2178                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2179                                                    overridesTable[index].awb_mode);
2180        focus_override = (uint8_t)overridesTable[index].af_mode;
2181        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2182           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2183              supt = 1;
2184              break;
2185           }
2186        }
2187        if (supt) {
2188           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2189                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2190                                              focus_override);
2191        } else {
2192           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2193        }
2194        j+=3;
2195    }
2196}
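
/* The resulting list is consumed as consecutive (ae, awb, af) triplets, one
 * per framework-supported scene mode, which matches the
 * supported_scene_modes_cnt * 3 count passed to
 * ANDROID_CONTROL_SCENE_MODE_OVERRIDES above. Illustrative layout for two
 * scene modes:
 *
 *   overridesList = { ae0, awb0, af0,   ae1, awb1, af1 }
 */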
2197
2198/*===========================================================================
2199 * FUNCTION   : getScalarFormat
2200 *
2201 * DESCRIPTION: convert the format to type recognized by framework
2202 *
2203 * PARAMETERS : format : the format from backend
2204 *
2205 * RETURN    : format recognized by framework
2206 *
2207 *==========================================================================*/
2208int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2209{
2210    int32_t halPixelFormat;
2211
2212    switch (format) {
2213    case CAM_FORMAT_YUV_420_NV12:
2214        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2215        break;
2216    case CAM_FORMAT_YUV_420_NV21:
2217        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2218        break;
2219    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2220        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2221        break;
2222    case CAM_FORMAT_YUV_420_YV12:
2223        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2224        break;
2225    case CAM_FORMAT_YUV_422_NV16:
2226    case CAM_FORMAT_YUV_422_NV61:
2227    default:
2228        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2229        break;
2230    }
2231    return halPixelFormat;
2232}
2233
2234/*===========================================================================
2235 * FUNCTION   : getSensorSensitivity
2236 *
2237 * DESCRIPTION: convert iso_mode to an integer value
2238 *
2239 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2240 *
2241 * RETURN    : sensitivity supported by sensor
2242 *
2243 *==========================================================================*/
2244int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2245{
2246    int32_t sensitivity;
2247
2248    switch (iso_mode) {
2249    case CAM_ISO_MODE_100:
2250        sensitivity = 100;
2251        break;
2252    case CAM_ISO_MODE_200:
2253        sensitivity = 200;
2254        break;
2255    case CAM_ISO_MODE_400:
2256        sensitivity = 400;
2257        break;
2258    case CAM_ISO_MODE_800:
2259        sensitivity = 800;
2260        break;
2261    case CAM_ISO_MODE_1600:
2262        sensitivity = 1600;
2263        break;
2264    default:
2265        sensitivity = -1;
2266        break;
2267    }
2268    return sensitivity;
2269}
2270
2271
2272/*===========================================================================
2273 * FUNCTION   : AddSetParmEntryToBatch
2274 *
2275 * DESCRIPTION: add set parameter entry into batch
2276 *
2277 * PARAMETERS :
2278 *   @p_table     : ptr to parameter buffer
2279 *   @paramType   : parameter type
2280 *   @paramLength : length of parameter value
2281 *   @paramValue  : ptr to parameter value
2282 *
2283 * RETURN     : int32_t type of status
2284 *              NO_ERROR  -- success
2285 *              non-zero failure code
2286 *==========================================================================*/
2287int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2288                                                          cam_intf_parm_type_t paramType,
2289                                                          uint32_t paramLength,
2290                                                          void *paramValue)
2291{
2292    int position = paramType;
2293    int current, next;
2294
2295    /*************************************************************************
2296    *                 Code to take care of linking next flags                *
2297    *************************************************************************/
2298    current = GET_FIRST_PARAM_ID(p_table);
2299    if (position == current){
2300        //DO NOTHING
2301    } else if (position < current){
2302        SET_NEXT_PARAM_ID(position, p_table, current);
2303        SET_FIRST_PARAM_ID(p_table, position);
2304    } else {
2305        /* Search for the position in the linked list where we need to slot in*/
2306        while (position > GET_NEXT_PARAM_ID(current, p_table))
2307            current = GET_NEXT_PARAM_ID(current, p_table);
2308
2309        /*If node already exists no need to alter linking*/
2310        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2311            next = GET_NEXT_PARAM_ID(current, p_table);
2312            SET_NEXT_PARAM_ID(current, p_table, position);
2313            SET_NEXT_PARAM_ID(position, p_table, next);
2314        }
2315    }
2316
2317    /*************************************************************************
2318    *                   Copy contents into entry                             *
2319    *************************************************************************/
2320
2321    if (paramLength > sizeof(parm_type_t)) {
2322        ALOGE("%s:Size of input larger than max entry size",__func__);
2323        return BAD_VALUE;
2324    }
2325    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2326    return NO_ERROR;
2327}
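
/* Illustrative sketch of how a batch built this way can be walked, assuming
 * the GET_FIRST_PARAM_ID/GET_NEXT_PARAM_ID macros used above and that
 * CAM_INTF_PARM_MAX terminates the list (as suggested by the
 * first_flagged_entry initialization in setFrameParameters() below):
 *
 *   int id = GET_FIRST_PARAM_ID(p_table);
 *   while (id < CAM_INTF_PARM_MAX) {
 *       // POINTER_OF(id, p_table) is the value that was memcpy'd in above
 *       id = GET_NEXT_PARAM_ID(id, p_table);
 *   }
 */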
2328
2329/*===========================================================================
2330 * FUNCTION   : lookupFwkName
2331 *
2332 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
2333 *              make sure the parameter is correctly propagated
2334 *
2335 * PARAMETERS  :
2336 *   @arr      : map between the two enums
2337 *   @len      : len of the map
2338 *   @hal_name : name of the hal_parm to map
2339 *
2340 * RETURN     : int type of status
2341 *              fwk_name  -- success
2342 *              non-zero failure code
2343 *==========================================================================*/
2344int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2345                                             int len, int hal_name)
2346{
2347
2348    for (int i = 0; i < len; i++) {
2349        if (arr[i].hal_name == hal_name)
2350            return arr[i].fwk_name;
2351    }
2352
2353    /* Not able to find matching framework type is not necessarily
2354     * an error case. This happens when mm-camera supports more attributes
2355     * than the frameworks do */
2356    ALOGD("%s: Cannot find matching framework type", __func__);
2357    return NAME_NOT_FOUND;
2358}
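
/* Example (illustrative): mapping a backend effect value to the framework
 * enum, as done when building ANDROID_CONTROL_AVAILABLE_EFFECTS above:
 *
 *   int val = lookupFwkName(EFFECT_MODES_MAP,
 *                           sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
 *                           CAM_EFFECT_MODE_SEPIA);
 *   // val == ANDROID_CONTROL_EFFECT_MODE_SEPIA, or NAME_NOT_FOUND if unmapped
 */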
2359
2360/*===========================================================================
2361 * FUNCTION   : lookupHalName
2362 *
2363 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
2364 *              make sure the parameter is correctly propagated
2365 *
2366 * PARAMETERS  :
2367 *   @arr      : map between the two enums
2368 *   @len      : len of the map
2369 *   @fwk_name : framework parameter name to map to a HAL value
2370 *
2371 * RETURN     : int32_t type of status
2372 *              hal_name  -- success
2373 *              non-zero failure code
2374 *==========================================================================*/
2375int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2376                                             int len, int fwk_name)
2377{
2378    for (int i = 0; i < len; i++) {
2379       if (arr[i].fwk_name == fwk_name)
2380           return arr[i].hal_name;
2381    }
2382    ALOGE("%s: Cannot find matching hal type", __func__);
2383    return NAME_NOT_FOUND;
2384}
2385
2386/*===========================================================================
2387 * FUNCTION   : getCamInfo
2388 *
2389 * DESCRIPTION: query camera capabilities
2390 *
2391 * PARAMETERS :
2392 *   @cameraId  : camera Id
2393 *   @info      : camera info struct to be filled in with camera capabilities
2394 *
2395 * RETURN     : int32_t type of status
2396 *              NO_ERROR  -- success
2397 *              non-zero failure code
2398 *==========================================================================*/
2399int QCamera3HardwareInterface::getCamInfo(int cameraId,
2400                                    struct camera_info *info)
2401{
2402    int rc = 0;
2403
2404    if (NULL == gCamCapability[cameraId]) {
2405        rc = initCapabilities(cameraId);
2406        if (rc < 0) {
2407            //pthread_mutex_unlock(&g_camlock);
2408            return rc;
2409        }
2410    }
2411
2412    if (NULL == gStaticMetadata[cameraId]) {
2413        rc = initStaticMetadata(cameraId);
2414        if (rc < 0) {
2415            return rc;
2416        }
2417    }
2418
2419    switch(gCamCapability[cameraId]->position) {
2420    case CAM_POSITION_BACK:
2421        info->facing = CAMERA_FACING_BACK;
2422        break;
2423
2424    case CAM_POSITION_FRONT:
2425        info->facing = CAMERA_FACING_FRONT;
2426        break;
2427
2428    default:
2429        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2430        rc = -1;
2431        break;
2432    }
2433
2434
2435    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2436    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2437    info->static_camera_characteristics = gStaticMetadata[cameraId];
2438
2439    return rc;
2440}
2441
2442/*===========================================================================
2443 * FUNCTION   : translateCapabilityToMetadata
2444 *
2445 * DESCRIPTION: translate camera capabilities into default request settings
2446 *
2447 * PARAMETERS : @type : request template type (CAMERA3_TEMPLATE_*)
2448 *
2449 *
2450 * RETURN     : success: camera_metadata_t*
2451 *              failure: NULL
2452 *
2453 *==========================================================================*/
2454camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2455{
2456    pthread_mutex_lock(&mMutex);
2457
2458    if (mDefaultMetadata[type] != NULL) {
2459        pthread_mutex_unlock(&mMutex);
2460        return mDefaultMetadata[type];
2461    }
2462    //first time we are handling this request
2463    //fill up the metadata structure using the wrapper class
2464    CameraMetadata settings;
2465    //translate from cam_capability_t to camera_metadata_tag_t
2466    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2467    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2468
2469    /*control*/
2470
2471    uint8_t controlIntent = 0;
2472    switch (type) {
2473      case CAMERA3_TEMPLATE_PREVIEW:
2474        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2475        break;
2476      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2477        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2478        break;
2479      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2480        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2481        break;
2482      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2483        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2484        break;
2485      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2486        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2487        break;
2488      default:
2489        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2490        break;
2491    }
2492    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2493
2494    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2495            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2496
2497    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2498    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2499
2500    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2501    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2502
2503    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2504    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2505
2506    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2507    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2508
2509    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2510    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2511
2512    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2513    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2514
2515    static uint8_t focusMode;
2516    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2517        ALOGE("%s: Setting focus mode to auto", __func__);
2518        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2519    } else {
2520        ALOGE("%s: Setting focus mode to off", __func__);
2521        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2522    }
2523    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2524
2525    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2526    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2527
2528    /*flash*/
2529    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2530    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2531
2532    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2533    settings.update(ANDROID_FLASH_FIRING_POWER,
2534            &flashFiringLevel, 1);
2535
2536    /* lens */
2537    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2538    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2539
2540    if (gCamCapability[mCameraId]->filter_densities_count) {
2541        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2542        settings.update(ANDROID_LENS_FILTER_DENSITY,
2543                        &default_filter_density, 1);
2544    }
2545
2546    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2547    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2548
2549    mDefaultMetadata[type] = settings.release();
2550
2551    pthread_mutex_unlock(&mMutex);
2552    return mDefaultMetadata[type];
2553}
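
/* Minimal usage sketch (illustrative): the default settings for a template
 * are built once and then cached in mDefaultMetadata[type], so repeated calls
 * return the same buffer.
 *
 *   camera_metadata_t *defaults =
 *       translateCapabilityToMetadata(CAMERA3_TEMPLATE_PREVIEW);
 */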
2554
2555/*===========================================================================
2556 * FUNCTION   : setFrameParameters
2557 *
2558 * DESCRIPTION: set parameters per frame as requested in the metadata from
2559 *              framework
2560 *
2561 * PARAMETERS :
2562 *   @frame_id  : frame number for this particular request
2563 *   @settings  : frame settings information from framework
2564 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2565 *
2566 * RETURN     : success: NO_ERROR
2567 *              failure:
2568 *==========================================================================*/
2569int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2570                    const camera_metadata_t *settings, uint32_t streamTypeMask)
2571{
2572    /*translate from camera_metadata_t type to parm_type_t*/
2573    int rc = 0;
2574    if (settings == NULL && mFirstRequest) {
2575        /*settings cannot be null for the first request*/
2576        return BAD_VALUE;
2577    }
2578
2579    int32_t hal_version = CAM_HAL_V3;
2580
2581    memset(mParameters, 0, sizeof(parm_buffer_t));
2582    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2583    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2584                sizeof(hal_version), &hal_version);
2585
2586    /*we need to update the frame number in the parameters*/
2587    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2588                                sizeof(frame_id), &frame_id);
2589    if (rc < 0) {
2590        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2591        return BAD_VALUE;
2592    }
2593
2594    /* Update stream id mask where buffers are requested */
2595    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2596                                sizeof(streamTypeMask), &streamTypeMask);
2597    if (rc < 0) {
2598        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2599        return BAD_VALUE;
2600    }
2601
2602    if(settings != NULL){
2603        rc = translateMetadataToParameters(settings);
2604    }
2605    /*set the parameters to backend*/
2606    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2607    return rc;
2608}
2609
2610/*===========================================================================
2611 * FUNCTION   : translateMetadataToParameters
2612 *
2613 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2614 *
2615 *
2616 * PARAMETERS :
2617 *   @settings  : frame settings information from framework
2618 *
2619 *
2620 * RETURN     : success: NO_ERROR
2621 *              failure:
2622 *==========================================================================*/
2623int QCamera3HardwareInterface::translateMetadataToParameters
2624                                  (const camera_metadata_t *settings)
2625{
2626    int rc = 0;
2627    CameraMetadata frame_settings;
2628    frame_settings = settings;
2629
2630
2631    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2632        int32_t antibandingMode =
2633            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2634        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2635                sizeof(antibandingMode), &antibandingMode);
2636    }
2637
2638    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2639        int32_t expCompensation = frame_settings.find(
2640            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2641        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2642            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2643        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2644            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2645        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2646          sizeof(expCompensation), &expCompensation);
2647    }
2648
2649    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2650        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2651        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2652                sizeof(aeLock), &aeLock);
2653    }
2654    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2655        cam_fps_range_t fps_range;
2656        fps_range.min_fps =
2657            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2658        fps_range.max_fps =
2659            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2660        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2661                sizeof(fps_range), &fps_range);
2662    }
2663
2664    float focalDistance = -1.0;
2665    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2666        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2667        rc = AddSetParmEntryToBatch(mParameters,
2668                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2669                sizeof(focalDistance), &focalDistance);
2670    }
2671
2672    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2673        uint8_t fwk_focusMode =
2674            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2675        uint8_t focusMode;
2676        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2677            focusMode = CAM_FOCUS_MODE_INFINITY;
2678        } else {
2679            focusMode = lookupHalName(FOCUS_MODES_MAP,
2680                                      sizeof(FOCUS_MODES_MAP),
2681                                      fwk_focusMode);
2682        }
2683        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2684                sizeof(focusMode), &focusMode);
2685    }
2686
2687    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2688        uint8_t awbLock =
2689            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2690        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2691                sizeof(awbLock), &awbLock);
2692    }
2693
2694    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2695        uint8_t fwk_whiteLevel =
2696            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2697        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2698                sizeof(WHITE_BALANCE_MODES_MAP),
2699                fwk_whiteLevel);
2700        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2701                sizeof(whiteLevel), &whiteLevel);
2702    }
2703
2704    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2705        uint8_t fwk_effectMode =
2706            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2707        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2708                sizeof(EFFECT_MODES_MAP),
2709                fwk_effectMode);
2710        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2711                sizeof(effectMode), &effectMode);
2712    }
2713
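    /* A single framework AE mode fans out into three backend parameters:
     * AEC on/off (CAM_INTF_META_AEC_MODE), LED/flash mode (CAM_INTF_PARM_LED_MODE)
     * and red-eye reduction (CAM_INTF_PARM_REDEYE_REDUCTION). */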
2714    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2715        uint8_t fwk_aeMode =
2716            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2717        uint8_t aeMode;
2718        int32_t redeye;
2719
2720        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2721            aeMode = CAM_AE_MODE_OFF;
2722        } else {
2723            aeMode = CAM_AE_MODE_ON;
2724        }
2725        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2726            redeye = 1;
2727        } else {
2728            redeye = 0;
2729        }
2730
2731        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2732                                          sizeof(AE_FLASH_MODE_MAP),
2733                                          fwk_aeMode);
2734        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2735                sizeof(aeMode), &aeMode);
2736        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2737                sizeof(flashMode), &flashMode);
2738        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2739                sizeof(redeye), &redeye);
2740    }
2741
2742    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2743        uint8_t colorCorrectMode =
2744            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2745        rc =
2746            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2747                    sizeof(colorCorrectMode), &colorCorrectMode);
2748    }
2749
2750    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
2751        cam_color_correct_gains_t colorCorrectGains;
2752        for (int i = 0; i < 4; i++) {
2753            colorCorrectGains.gains[i] =
2754                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
2755        }
2756        rc =
2757            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
2758                    sizeof(colorCorrectGains), &colorCorrectGains);
2759    }
2760
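    /* The 3x3 color correction transform arrives as a flat list of rationals in
     * row-major order; copy it element by element into the HAL matrix type. */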
2761    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
2762        cam_color_correct_matrix_t colorCorrectTransform;
2763        cam_rational_type_t transform_elem;
2764        int num = 0;
2765        for (int i = 0; i < 3; i++) {
2766           for (int j = 0; j < 3; j++) {
2767              transform_elem.numerator =
2768                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
2769              transform_elem.denominator =
2770                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
2771              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
2772              num++;
2773           }
2774        }
2775        rc =
2776            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
2777                    sizeof(colorCorrectTransform), &colorCorrectTransform);
2778    }
2779
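    /* The AEC precapture trigger is always sent to the backend; it defaults to
     * IDLE with trigger id -1 when the request does not carry both the trigger
     * and its id. */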
2780    cam_trigger_t aecTrigger;
2781    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2782    aecTrigger.trigger_id = -1;
2783    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2784        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2785        aecTrigger.trigger =
2786            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2787        aecTrigger.trigger_id =
2788            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2789    }
2790    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2791                                sizeof(aecTrigger), &aecTrigger);
2792
2793    /*af_trigger must come with a trigger id*/
2794    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2795        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2796        cam_trigger_t af_trigger;
2797        af_trigger.trigger =
2798            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2799        af_trigger.trigger_id =
2800            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2801        rc = AddSetParmEntryToBatch(mParameters,
2802                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2803    }
2804
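    /* android.control.mode selects the backend bestshot (scene) mode: both OFF
     * and AUTO disable bestshot, while USE_SCENE_MODE maps the requested scene
     * mode through SCENE_MODES_MAP. */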
2805    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2806        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2807        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2808                sizeof(metaMode), &metaMode);
2809        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2810           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2811           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2812                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2813                                             fwk_sceneMode);
2814           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2815                sizeof(sceneMode), &sceneMode);
2816        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
2817           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2818           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2819                sizeof(sceneMode), &sceneMode);
2820        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
2821           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2822           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2823                sizeof(sceneMode), &sceneMode);
2824        }
2825    }
2826
2827    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2828        int32_t demosaic =
2829            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2830        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2831                sizeof(demosaic), &demosaic);
2832    }
2833
2834    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2835        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2836        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
2837                sizeof(edgeMode), &edgeMode);
2838    }
2839
2840    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2841        int32_t edgeStrength =
2842            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2843        rc = AddSetParmEntryToBatch(mParameters,
2844                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2845    }
2846
2847    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2848        int32_t respectFlashMode = 1;
2849        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2850            uint8_t fwk_aeMode =
2851                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2852            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
2853                respectFlashMode = 0;
2854                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
2855                    __func__);
2856            }
2857        }
2858        if (respectFlashMode) {
2859            uint8_t fwk_flashMode =
2860                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2861            int32_t flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
2862                                          sizeof(FLASH_MODES_MAP),
2863                                          fwk_flashMode);
2864            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
2865            // To check: CAM_INTF_META_FLASH_MODE usage
2866            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2867                          sizeof(flashMode), &flashMode);
2868        }
2869    }
2870
2871    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2872        uint8_t flashPower =
2873            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2874        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2875                sizeof(flashPower), &flashPower);
2876    }
2877
2878    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2879        int64_t flashFiringTime =
2880            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2881        rc = AddSetParmEntryToBatch(mParameters,
2882                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2883    }
2884
2885    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2886        uint8_t geometricMode =
2887            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2888        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2889                sizeof(geometricMode), &geometricMode);
2890    }
2891
2892    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2893        uint8_t geometricStrength =
2894            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2895        rc = AddSetParmEntryToBatch(mParameters,
2896                CAM_INTF_META_GEOMETRIC_STRENGTH,
2897                sizeof(geometricStrength), &geometricStrength);
2898    }
2899
2900    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2901        uint8_t hotPixelMode =
2902            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2903        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2904                sizeof(hotPixelMode), &hotPixelMode);
2905    }
2906
2907    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2908        float lensAperture =
2909            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2910        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2911                sizeof(lensAperture), &lensAperture);
2912    }
2913
2914    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2915        float filterDensity =
2916            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2917        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2918                sizeof(filterDensity), &filterDensity);
2919    }
2920
2921    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2922        float focalLength =
2923            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2924        rc = AddSetParmEntryToBatch(mParameters,
2925                CAM_INTF_META_LENS_FOCAL_LENGTH,
2926                sizeof(focalLength), &focalLength);
2927    }
2928
2929    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2930        uint8_t optStabMode =
2931            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2932        rc = AddSetParmEntryToBatch(mParameters,
2933                CAM_INTF_META_LENS_OPT_STAB_MODE,
2934                sizeof(optStabMode), &optStabMode);
2935    }
2936
2937    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2938        uint8_t noiseRedMode =
2939            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2940        rc = AddSetParmEntryToBatch(mParameters,
2941                CAM_INTF_META_NOISE_REDUCTION_MODE,
2942                sizeof(noiseRedMode), &noiseRedMode);
2943    }
2944
2945    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2946        uint8_t noiseRedStrength =
2947            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2948        rc = AddSetParmEntryToBatch(mParameters,
2949                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2950                sizeof(noiseRedStrength), &noiseRedStrength);
2951    }
2952
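    /* Remember the crop region (if any) so the AE/AF/AWB ROIs translated further
     * below can be validated against it. */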
2953    cam_crop_region_t scalerCropRegion;
2954    bool scalerCropSet = false;
2955    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2956        scalerCropRegion.left =
2957            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2958        scalerCropRegion.top =
2959            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2960        scalerCropRegion.width =
2961            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2962        scalerCropRegion.height =
2963            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2964        rc = AddSetParmEntryToBatch(mParameters,
2965                CAM_INTF_META_SCALER_CROP_REGION,
2966                sizeof(scalerCropRegion), &scalerCropRegion);
2967        scalerCropSet = true;
2968    }
2969
2970    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2971        int64_t sensorExpTime =
2972            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2973        rc = AddSetParmEntryToBatch(mParameters,
2974                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2975                sizeof(sensorExpTime), &sensorExpTime);
2976    }
2977
2978    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2979        int64_t sensorFrameDuration =
2980            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2981        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
2982            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
2983        rc = AddSetParmEntryToBatch(mParameters,
2984                CAM_INTF_META_SENSOR_FRAME_DURATION,
2985                sizeof(sensorFrameDuration), &sensorFrameDuration);
2986    }
2987
2988    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2989        int32_t sensorSensitivity =
2990            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2991        if (sensorSensitivity <
2992                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
2993            sensorSensitivity =
2994                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
2995        if (sensorSensitivity >
2996                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
2997            sensorSensitivity =
2998                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
2999        rc = AddSetParmEntryToBatch(mParameters,
3000                CAM_INTF_META_SENSOR_SENSITIVITY,
3001                sizeof(sensorSensitivity), &sensorSensitivity);
3002    }
3003
3004    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3005        int32_t shadingMode =
3006            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3007        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3008                sizeof(shadingMode), &shadingMode);
3009    }
3010
3011    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3012        uint8_t shadingStrength =
3013            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3014        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3015                sizeof(shadingStrength), &shadingStrength);
3016    }
3017
3018    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3019        uint8_t facedetectMode =
3020            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3021        rc = AddSetParmEntryToBatch(mParameters,
3022                CAM_INTF_META_STATS_FACEDETECT_MODE,
3023                sizeof(facedetectMode), &facedetectMode);
3024    }
3025
3026    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3027        uint8_t histogramMode =
3028            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3029        rc = AddSetParmEntryToBatch(mParameters,
3030                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3031                sizeof(histogramMode), &histogramMode);
3032    }
3033
3034    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3035        uint8_t sharpnessMapMode =
3036            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3037        rc = AddSetParmEntryToBatch(mParameters,
3038                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3039                sizeof(sharpnessMapMode), &sharpnessMapMode);
3040    }
3041
3042    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3043        uint8_t tonemapMode =
3044            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3045        rc = AddSetParmEntryToBatch(mParameters,
3046                CAM_INTF_META_TONEMAP_MODE,
3047                sizeof(tonemapMode), &tonemapMode);
3048    }
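    /* Each tonemap curve arrives as a flat array of (Pin, Pout) control points;
     * 'point' indexes the flattened framework array and is reset per color channel. */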
3049    int point = 0;
3050    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) {
3051        cam_tonemap_curve_t tonemapCurveBlue;
3052        tonemapCurveBlue.tonemap_points_cnt =
3053           gCamCapability[mCameraId]->max_tone_map_curve_points;
3054        for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) {
3055            for (int j = 0; j < 2; j++) {
3056               tonemapCurveBlue.tonemap_points[i][j] =
3057                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3058               point++;
3059            }
3060        }
3061        rc = AddSetParmEntryToBatch(mParameters,
3062                CAM_INTF_META_TONEMAP_CURVE_BLUE,
3063                sizeof(tonemapCurveBlue), &tonemapCurveBlue);
3064    }
3065    point = 0;
3066    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) {
3067        cam_tonemap_curve_t tonemapCurveGreen;
3068        tonemapCurveGreen.tonemap_points_cnt =
3069           gCamCapability[mCameraId]->max_tone_map_curve_points;
3070        for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) {
3071            for (int j = 0; j < 2; j++) {
3072               tonemapCurveGreen.tonemap_points[i][j] =
3073                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3074               point++;
3075            }
3076        }
3077        rc = AddSetParmEntryToBatch(mParameters,
3078                CAM_INTF_META_TONEMAP_CURVE_GREEN,
3079                sizeof(tonemapCurveGreen), &tonemapCurveGreen);
3080    }
3081    point = 0;
3082    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3083        cam_tonemap_curve_t tonemapCurveRed;
3084        tonemapCurveRed.tonemap_points_cnt =
3085           gCamCapability[mCameraId]->max_tone_map_curve_points;
3086        for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) {
3087            for (int j = 0; j < 2; j++) {
3088               tonemapCurveRed.tonemap_points[i][j] =
3089                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3090               point++;
3091            }
3092        }
3093        rc = AddSetParmEntryToBatch(mParameters,
3094                CAM_INTF_META_TONEMAP_CURVE_RED,
3095                sizeof(tonemapCurveRed), &tonemapCurveRed);
3096    }
3097
3098    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3099        uint8_t captureIntent =
3100            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3101        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3102                sizeof(captureIntent), &captureIntent);
3103    }
3104
3105    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3106        uint8_t blackLevelLock =
3107            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3108        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3109                sizeof(blackLevelLock), &blackLevelLock);
3110    }
3111
3112    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3113        uint8_t lensShadingMapMode =
3114            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3115        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3116                sizeof(lensShadingMapMode), &lensShadingMapMode);
3117    }
3118
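    /* Metering/focus regions: convert each framework region list into a HAL ROI
     * and, when a crop region was set above, check the ROI against that crop
     * region before applying it. */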
3119    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3120        cam_area_t roi;
3121        bool reset = true;
3122        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
3123        if (scalerCropSet) {
3124            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3125        }
3126        if (reset) {
3127            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3128                    sizeof(roi), &roi);
3129        }
3130    }
3131
3132    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3133        cam_area_t roi;
3134        bool reset = true;
3135        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
3136        if (scalerCropSet) {
3137            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3138        }
3139        if (reset) {
3140            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3141                    sizeof(roi), &roi);
3142        }
3143    }
3144
3145    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3146        cam_area_t roi;
3147        bool reset = true;
3148        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
3149        if (scalerCropSet) {
3150            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3151        }
3152        if (reset) {
3153            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3154                    sizeof(roi), &roi);
3155        }
3156    }
3157    return rc;
3158}
3159
3160/*===========================================================================
3161 * FUNCTION   : getJpegSettings
3162 *
3163 * DESCRIPTION: save the jpeg settings in the HAL
3164 *
3165 *
3166 * PARAMETERS :
3167 *   @settings  : frame settings information from framework
3168 *
3169 *
3170 * RETURN     : success: NO_ERROR
3171 *              failure: NO_MEMORY if jpeg settings allocation fails
3172 *==========================================================================*/
3173int QCamera3HardwareInterface::getJpegSettings
3174                                  (const camera_metadata_t *settings)
3175{
3176    if (mJpegSettings) {
3177        if (mJpegSettings->gps_timestamp) {
3178            free(mJpegSettings->gps_timestamp);
3179            mJpegSettings->gps_timestamp = NULL;
3180        }
3181        if (mJpegSettings->gps_coordinates) {
3182            for (int i = 0; i < 3; i++) {
3183                free(mJpegSettings->gps_coordinates[i]);
3184                mJpegSettings->gps_coordinates[i] = NULL;
3185            }
3186        }
3187        free(mJpegSettings);
3188        mJpegSettings = NULL;
3189    }
3190    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
    if (mJpegSettings == NULL) {
        ALOGE("%s: Failed to allocate memory for jpeg settings", __func__);
        return NO_MEMORY;
    }
3191    CameraMetadata jpeg_settings;
3192    jpeg_settings = settings;
3193
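    /* Copy each JPEG-related tag from the request into mJpegSettings, falling
     * back to a default when a tag is absent (orientation 0, quality 85,
     * 0x0 thumbnail). */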
3194    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3195        mJpegSettings->jpeg_orientation =
3196            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3197    } else {
3198        mJpegSettings->jpeg_orientation = 0;
3199    }
3200    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3201        mJpegSettings->jpeg_quality =
3202            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3203    } else {
3204        mJpegSettings->jpeg_quality = 85;
3205    }
3206    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3207        mJpegSettings->thumbnail_size.width =
3208            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3209        mJpegSettings->thumbnail_size.height =
3210            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3211    } else {
3212        mJpegSettings->thumbnail_size.width = 0;
3213        mJpegSettings->thumbnail_size.height = 0;
3214    }
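    /* GPS coordinates (latitude, longitude, altitude) are stored as individually
     * allocated doubles so that an absent tag can be represented by NULL pointers. */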
3215    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3216        for (int i = 0; i < 3; i++) {
3217            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double));
3218            *(mJpegSettings->gps_coordinates[i]) =
3219                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3220        }
3221    } else {
3222       for (int i = 0; i < 3; i++) {
3223            mJpegSettings->gps_coordinates[i] = NULL;
3224        }
3225    }
3226
3227    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3228        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t));
3229        *(mJpegSettings->gps_timestamp) =
3230            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3231    } else {
3232        mJpegSettings->gps_timestamp = NULL;
3233    }
3234
3235    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3236        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3237        for (int i = 0; i < len; i++) {
3238            mJpegSettings->gps_processing_method[i] =
3239                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3240        }
3241        if (len > 0 && mJpegSettings->gps_processing_method[len-1] != '\0') {
3242            mJpegSettings->gps_processing_method[len] = '\0';
3243        }
3244    } else {
3245        mJpegSettings->gps_processing_method[0] = '\0';
3246    }
3247
3248    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3249        mJpegSettings->sensor_sensitivity =
3250            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3251    } else {
3252        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3253    }
3254
3255    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3256        mJpegSettings->lens_focal_length =
3257            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3258    }
3259    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3260        mJpegSettings->exposure_compensation =
3261            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3262    }
3263    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3264    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3265    mJpegSettings->is_jpeg_format = true;
3266    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3267    return 0;
3268}
3269
3270/*===========================================================================
3271 * FUNCTION   : captureResultCb
3272 *
3273 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3274 *
3275 * PARAMETERS :
3276 *   @metadata : metadata super buffer from mm-camera-interface
3277 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3278 *   @frame_number : frame number of the capture request this result belongs to
 *   @userdata : pointer back to the QCamera3HardwareInterface instance
3279 *
3280 * RETURN     : NONE
3281 *==========================================================================*/
3282void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3283                camera3_stream_buffer_t *buffer,
3284                uint32_t frame_number, void *userdata)
3285{
3286    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3287    if (hw == NULL) {
3288        ALOGE("%s: Invalid hw %p", __func__, hw);
3289        return;
3290    }
3291
3292    hw->captureResultCb(metadata, buffer, frame_number);
3293    return;
3294}
3295
3296
3297/*===========================================================================
3298 * FUNCTION   : initialize
3299 *
3300 * DESCRIPTION: Pass framework callback pointers to HAL
3301 *
3302 * PARAMETERS :
3303 *   @device       : camera3 device handle
3304 *   @callback_ops : callback function table provided by the framework
3305 * RETURN     : Success : 0
3306 *              Failure: -ENODEV
3307 *==========================================================================*/
3308
3309int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3310                                  const camera3_callback_ops_t *callback_ops)
3311{
3312    ALOGV("%s: E", __func__);
3313    QCamera3HardwareInterface *hw =
3314        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3315    if (!hw) {
3316        ALOGE("%s: NULL camera device", __func__);
3317        return -ENODEV;
3318    }
3319
3320    int rc = hw->initialize(callback_ops);
3321    ALOGV("%s: X", __func__);
3322    return rc;
3323}
3324
3325/*===========================================================================
3326 * FUNCTION   : configure_streams
3327 *
3328 * DESCRIPTION: Configure the set of output streams requested by the framework
3329 *
3330 * PARAMETERS :
3331 *   @device      : camera3 device handle
3332 *   @stream_list : stream configuration requested by the framework
3333 * RETURN     : Success: 0
3334 *              Failure: -EINVAL (if stream configuration is invalid)
3335 *                       -ENODEV (fatal error)
3336 *==========================================================================*/
3337
3338int QCamera3HardwareInterface::configure_streams(
3339        const struct camera3_device *device,
3340        camera3_stream_configuration_t *stream_list)
3341{
3342    ALOGV("%s: E", __func__);
3343    QCamera3HardwareInterface *hw =
3344        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3345    if (!hw) {
3346        ALOGE("%s: NULL camera device", __func__);
3347        return -ENODEV;
3348    }
3349    int rc = hw->configureStreams(stream_list);
3350    ALOGV("%s: X", __func__);
3351    return rc;
3352}
3353
3354/*===========================================================================
3355 * FUNCTION   : register_stream_buffers
3356 *
3357 * DESCRIPTION: Register stream buffers with the device
3358 *
3359 * PARAMETERS :
3360 *   @device     : camera3 device handle
 *   @buffer_set : buffers allocated by the framework for a single stream
3361 * RETURN     : 0 on success, negative error code on failure
3362 *==========================================================================*/
3363int QCamera3HardwareInterface::register_stream_buffers(
3364        const struct camera3_device *device,
3365        const camera3_stream_buffer_set_t *buffer_set)
3366{
3367    ALOGV("%s: E", __func__);
3368    QCamera3HardwareInterface *hw =
3369        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3370    if (!hw) {
3371        ALOGE("%s: NULL camera device", __func__);
3372        return -ENODEV;
3373    }
3374    int rc = hw->registerStreamBuffers(buffer_set);
3375    ALOGV("%s: X", __func__);
3376    return rc;
3377}
3378
3379/*===========================================================================
3380 * FUNCTION   : construct_default_request_settings
3381 *
3382 * DESCRIPTION: Configure a settings buffer to meet the required use case
3383 *
3384 * PARAMETERS :
3385 *   @device : camera3 device handle
3386 *   @type   : request template type (e.g. CAMERA3_TEMPLATE_PREVIEW)
3387 * RETURN     : Success: Return valid metadata
3388 *              Failure: Return NULL
3389 *==========================================================================*/
3390const camera_metadata_t* QCamera3HardwareInterface::
3391    construct_default_request_settings(const struct camera3_device *device,
3392                                        int type)
3393{
3394
3395    ALOGV("%s: E", __func__);
3396    camera_metadata_t* fwk_metadata = NULL;
3397    QCamera3HardwareInterface *hw =
3398        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3399    if (!hw) {
3400        ALOGE("%s: NULL camera device", __func__);
3401        return NULL;
3402    }
3403
3404    fwk_metadata = hw->translateCapabilityToMetadata(type);
3405
3406    ALOGV("%s: X", __func__);
3407    return fwk_metadata;
3408}
3409
3410/*===========================================================================
3411 * FUNCTION   : process_capture_request
3412 *
3413 * DESCRIPTION: Queue a capture request from the framework for processing
3414 *
3415 * PARAMETERS :
3416 *   @device  : camera3 device handle
3417 *   @request : capture request to be processed
3418 * RETURN     : 0 on success, negative error code on failure
3419 *==========================================================================*/
3420int QCamera3HardwareInterface::process_capture_request(
3421                    const struct camera3_device *device,
3422                    camera3_capture_request_t *request)
3423{
3424    ALOGV("%s: E", __func__);
3425    QCamera3HardwareInterface *hw =
3426        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3427    if (!hw) {
3428        ALOGE("%s: NULL camera device", __func__);
3429        return -EINVAL;
3430    }
3431
3432    int rc = hw->processCaptureRequest(request);
3433    ALOGV("%s: X", __func__);
3434    return rc;
3435}
3436
3437/*===========================================================================
3438 * FUNCTION   : get_metadata_vendor_tag_ops
3439 *
3440 * DESCRIPTION: Export the vendor tag query operations to the framework
3441 *
3442 * PARAMETERS :
3443 *   @device : camera3 device handle
3444 *   @ops    : vendor tag query ops table to be filled in
3445 * RETURN     : NONE
3446 *==========================================================================*/
3447
3448void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3449                const struct camera3_device *device,
3450                vendor_tag_query_ops_t* ops)
3451{
3452    ALOGV("%s: E", __func__);
3453    QCamera3HardwareInterface *hw =
3454        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3455    if (!hw) {
3456        ALOGE("%s: NULL camera device", __func__);
3457        return;
3458    }
3459
3460    hw->getMetadataVendorTagOps(ops);
3461    ALOGV("%s: X", __func__);
3462    return;
3463}
3464
3465/*===========================================================================
3466 * FUNCTION   : dump
3467 *
3468 * DESCRIPTION: Dump HAL state into the given file descriptor
3469 *
3470 * PARAMETERS :
3471 *   @device : camera3 device handle
3472 *   @fd     : file descriptor to dump into
3473 * RETURN     : NONE
3474 *==========================================================================*/
3475
3476void QCamera3HardwareInterface::dump(
3477                const struct camera3_device *device, int fd)
3478{
3479    ALOGV("%s: E", __func__);
3480    QCamera3HardwareInterface *hw =
3481        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3482    if (!hw) {
3483        ALOGE("%s: NULL camera device", __func__);
3484        return;
3485    }
3486
3487    hw->dump(fd);
3488    ALOGV("%s: X", __func__);
3489    return;
3490}
3491
3492/*===========================================================================
3493 * FUNCTION   : close_camera_device
3494 *
3495 * DESCRIPTION: Close the camera device and mark the camera session inactive
3496 *
3497 * PARAMETERS :
3498 *   @device : camera device handle to be closed
3499 * RETURN     : Success: NO_ERROR
3500 *              Failure: BAD_VALUE (NULL camera device)
3501 *==========================================================================*/
3502int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3503{
3504    ALOGV("%s: E", __func__);
3505    int ret = NO_ERROR;
3506    QCamera3HardwareInterface *hw =
3507        reinterpret_cast<QCamera3HardwareInterface *>(
3508            reinterpret_cast<camera3_device_t *>(device)->priv);
3509    if (!hw) {
3510        ALOGE("NULL camera device");
3511        return BAD_VALUE;
3512    }
3513    delete hw;
3514
3515    pthread_mutex_lock(&mCameraSessionLock);
3516    mCameraSessionActive = 0;
3517    pthread_mutex_unlock(&mCameraSessionLock);
3518    ALOGV("%s: X", __func__);
3519    return ret;
3520}
3521
3522/*===========================================================================
3523 * FUNCTION   : getWaveletDenoiseProcessPlate
3524 *
3525 * DESCRIPTION: query wavelet denoise process plate
3526 *
3527 * PARAMETERS : None
3528 *
3529 * RETURN     : WNR process plate value
3530 *==========================================================================*/
3531cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3532{
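    /* The plate selection is controlled by the persist.denoise.process.plates
     * system property (e.g. "adb shell setprop persist.denoise.process.plates 2"
     * selects the streamlined Y/CbCr plate); unrecognized values fall back to
     * streamlined Y/CbCr as well. */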
3533    char prop[PROPERTY_VALUE_MAX];
3534    memset(prop, 0, sizeof(prop));
3535    property_get("persist.denoise.process.plates", prop, "0");
3536    int processPlate = atoi(prop);
3537    switch(processPlate) {
3538    case 0:
3539        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3540    case 1:
3541        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3542    case 2:
3543        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3544    case 3:
3545        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3546    default:
3547        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3548    }
3549}
3550
3551/*===========================================================================
3552 * FUNCTION   : needRotationReprocess
3553 *
3554 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3555 *
3556 * PARAMETERS : none
3557 *
3558 * RETURN     : true: needed
3559 *              false: no need
3560 *==========================================================================*/
3561bool QCamera3HardwareInterface::needRotationReprocess()
3562{
3563
3564    if (!mJpegSettings->is_jpeg_format) {
3565        // RAW image, no need to reprocess
3566        return false;
3567    }
3568
3569    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3570        mJpegSettings->jpeg_orientation > 0) {
3571        // current rotation is not zero, and pp has the capability to process rotation
3572        ALOGD("%s: need do reprocess for rotation", __func__);
3573        return true;
3574    }
3575
3576    return false;
3577}
3578
3579/*===========================================================================
3580 * FUNCTION   : needReprocess
3581 *
3582 * DESCRIPTION: if reprocess in needed
3583 *
3584 * PARAMETERS : none
3585 *
3586 * RETURN     : true: needed
3587 *              false: no need
3588 *==========================================================================*/
3589bool QCamera3HardwareInterface::needReprocess()
3590{
3591    if (!mJpegSettings->is_jpeg_format) {
3592        // RAW image, no need to reprocess
3593        return false;
3594    }
3595
3596    if ((mJpegSettings->min_required_pp_mask > 0) ||
3597         isWNREnabled()) {
3598        // TODO: add for ZSL HDR later
3599        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3600        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3601        return true;
3602    }
3603    return needRotationReprocess();
3604}
3605
3606/*===========================================================================
3607 * FUNCTION   : addOnlineReprocChannel
3608 *
3609 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
3610 *              coming from input channel
3611 *
3612 * PARAMETERS :
3613 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3614 *
3615 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3616 *==========================================================================*/
3617QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3618                                                      QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3619{
3620    int32_t rc = NO_ERROR;
3621    QCamera3ReprocessChannel *pChannel = NULL;
3622    if (pInputChannel == NULL) {
3623        ALOGE("%s: input channel obj is NULL", __func__);
3624        return NULL;
3625    }
3626
3627    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3628            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
3629    if (NULL == pChannel) {
3630        ALOGE("%s: no mem for reprocess channel", __func__);
3631        return NULL;
3632    }
3633
3634    // Reprocess channel attributes (continuous notify mode); populated here but not passed on to the channel in this revision
3635    mm_camera_channel_attr_t attr;
3636    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
3637    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
3638    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
3639    rc = pChannel->initialize();
3640    if (rc != NO_ERROR) {
3641        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
3642        delete pChannel;
3643        return NULL;
3644    }
3645
3646    // pp feature config
3647    cam_pp_feature_config_t pp_config;
3648    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
3649    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
3650        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
3651        pp_config.sharpness = 10;
3652    }
3653
3654    if (isWNREnabled()) {
3655        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
3656        pp_config.denoise2d.denoise_enable = 1;
3657        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
3658    }
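    /* Apply the requested JPEG orientation in the reprocess pass when the
     * hardware advertises the rotation feature (see needRotationReprocess()). */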
3659    if (needRotationReprocess()) {
3660        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
3661        int rotation = mJpegSettings->jpeg_orientation;
3662        if (rotation == 0) {
3663            pp_config.rotation = ROTATE_0;
3664        } else if (rotation == 90) {
3665            pp_config.rotation = ROTATE_90;
3666        } else if (rotation == 180) {
3667            pp_config.rotation = ROTATE_180;
3668        } else if (rotation == 270) {
3669            pp_config.rotation = ROTATE_270;
3670        }
3671    }
3672
3673    rc = pChannel->addReprocStreamsFromSource(pp_config,
3674                                             pInputChannel,
3675                                             mMetadataChannel);
3676
3677    if (rc != NO_ERROR) {
3678        delete pChannel;
3679        return NULL;
3680    }
3681    return pChannel;
3682}
3683
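/*===========================================================================
 * FUNCTION   : getMaxUnmatchedFramesInQueue
 *
 * DESCRIPTION: max number of unmatched frames allowed in the channel queue,
 *              taken from the capability's minimum post-proc buffer count
 *
 * PARAMETERS : none
 *
 * RETURN     : max unmatched frame count
 *==========================================================================*/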
3684int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
3685{
3686    return gCamCapability[mCameraId]->min_num_pp_bufs;
3687}
3688
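/*===========================================================================
 * FUNCTION   : isWNREnabled
 *
 * DESCRIPTION: whether wavelet noise reduction is supported by the sensor
 *
 * PARAMETERS : none
 *
 * RETURN     : true if WNR is supported, false otherwise
 *==========================================================================*/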
3689bool QCamera3HardwareInterface::isWNREnabled() {
3690    return gCamCapability[mCameraId]->isWnrSupported;
3691}
3692
3693}; //end namespace qcamera
3694