// QCamera3HWI.cpp revision 34296c8a11c87ca1c00cfaedddd1f9089344ee80
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand for fetching the mapped CPU pointer of buffer INDEX in a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables; filled in elsewhere before a HAL instance is
// constructed (the constructor dereferences gCamCapability[cameraId]).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive; used to enforce a single open camera session
// at a time (see openCamera(struct hw_device_t **)).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Translation table: framework ANDROID_CONTROL_EFFECT_MODE_* -> HAL CAM_EFFECT_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
72
// Translation table: framework ANDROID_CONTROL_AWB_MODE_* -> HAL CAM_WB_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
84
// Translation table: framework ANDROID_CONTROL_SCENE_MODE_* -> HAL CAM_SCENE_MODE_*.
// Note FACE_PRIORITY deliberately maps to CAM_SCENE_MODE_OFF (face handling is
// done separately), and STEADYPHOTO maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
103
// Translation table: framework ANDROID_CONTROL_AF_MODE_* -> HAL CAM_FOCUS_MODE_*.
// AF_MODE_OFF maps to FIXED focus since the HAL has no literal "off" focus mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
112
// Translation table: framework AE antibanding modes -> HAL CAM_ANTIBANDING_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
119
// Translation table: framework AE mode -> HAL flash mode implied by that AE mode.
// Both AE OFF and plain AE ON mean "no flash"; REDEYE is treated as AUTO flash.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
127
// Translation table: framework ANDROID_FLASH_MODE_* -> HAL CAM_FLASH_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
133
// Translation table: framework face-detect statistics modes -> HAL modes.
// SIMPLE mode is intentionally absent; only OFF and FULL are supported here.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
138
// JPEG thumbnail sizes advertised to the framework, as flat (width, height)
// pairs; the trailing 0,0 entry means "no thumbnail" per the HAL3 contract.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
141
// camera3_device_ops vtable handed to the framework; each entry is a static
// trampoline that recovers the QCamera3HardwareInterface instance from
// camera3_device_t::priv and forwards the call.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
153
154
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the camera3_device_t the framework will talk to; priv points
    // back at this instance so the static ops trampolines can recover it.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): assumes gCamCapability[cameraId] was populated before this
    // constructor runs (deref'd without a null check) — confirm against the
    // module-level open path.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request tracking state: condition + counters guarded by mMutex.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start them all out NULL.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power module is optional; m_pPowerModule stays NULL if lookup fails.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
208
209/*===========================================================================
210 * FUNCTION   : ~QCamera3HardwareInterface
211 *
212 * DESCRIPTION: destructor of QCamera3HardwareInterface
213 *
214 * PARAMETERS : none
215 *
216 * RETURN     : none
217 *==========================================================================*/
218QCamera3HardwareInterface::~QCamera3HardwareInterface()
219{
220    ALOGV("%s: E", __func__);
221    /* We need to stop all streams before deleting any stream */
222        /*flush the metadata list*/
223    if (!mStoredMetadataList.empty()) {
224        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
225              m != mStoredMetadataList.end(); m++) {
226            mMetadataChannel->bufDone(m->meta_buf);
227            free(m->meta_buf);
228            m = mStoredMetadataList.erase(m);
229        }
230    }
231
232    // NOTE: 'camera3_stream_t *' objects are already freed at
233    //        this stage by the framework
234    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
235        it != mStreamInfo.end(); it++) {
236        QCamera3Channel *channel = (*it)->channel;
237        if (channel) {
238            channel->stop();
239        }
240    }
241
242    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
243        it != mStreamInfo.end(); it++) {
244        QCamera3Channel *channel = (*it)->channel;
245        if ((*it)->registered && (*it)->buffer_set.buffers) {
246             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
247        }
248        if (channel)
249            delete channel;
250        free (*it);
251    }
252
253    mPictureChannel = NULL;
254
255    if (mJpegSettings != NULL) {
256        free(mJpegSettings);
257        mJpegSettings = NULL;
258    }
259
260    /* Clean up all channels */
261    if (mCameraInitialized) {
262        if (mMetadataChannel) {
263            mMetadataChannel->stop();
264            delete mMetadataChannel;
265            mMetadataChannel = NULL;
266        }
267        deinitParameters();
268    }
269
270    if (mCameraOpened)
271        closeCamera();
272
273    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
274        if (mDefaultMetadata[i])
275            free_camera_metadata(mDefaultMetadata[i]);
276
277    pthread_cond_destroy(&mRequestCond);
278
279    pthread_mutex_destroy(&mMutex);
280    ALOGV("%s: X", __func__);
281}
282
283/*===========================================================================
284 * FUNCTION   : openCamera
285 *
286 * DESCRIPTION: open camera
287 *
288 * PARAMETERS :
289 *   @hw_device  : double ptr for camera device struct
290 *
291 * RETURN     : int32_t type of status
292 *              NO_ERROR  -- success
293 *              none-zero failure code
294 *==========================================================================*/
295int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
296{
297    int rc = 0;
298    pthread_mutex_lock(&mCameraSessionLock);
299    if (mCameraSessionActive) {
300        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
301        pthread_mutex_unlock(&mCameraSessionLock);
302        return -EUSERS;
303    }
304
305    if (mCameraOpened) {
306        *hw_device = NULL;
307        return PERMISSION_DENIED;
308    }
309
310    rc = openCamera();
311    if (rc == 0) {
312        *hw_device = &mCameraDevice.common;
313        mCameraSessionActive = 1;
314    } else
315        *hw_device = NULL;
316
317#ifdef HAS_MULTIMEDIA_HINTS
318    if (rc == 0) {
319        if (m_pPowerModule) {
320            if (m_pPowerModule->powerHint) {
321                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
322                        (void *)"state=1");
323            }
324        }
325    }
326#endif
327    pthread_mutex_unlock(&mCameraSessionLock);
328    return rc;
329}
330
331/*===========================================================================
332 * FUNCTION   : openCamera
333 *
334 * DESCRIPTION: open camera
335 *
336 * PARAMETERS : none
337 *
338 * RETURN     : int32_t type of status
339 *              NO_ERROR  -- success
340 *              none-zero failure code
341 *==========================================================================*/
342int QCamera3HardwareInterface::openCamera()
343{
344    if (mCameraHandle) {
345        ALOGE("Failure: Camera already opened");
346        return ALREADY_EXISTS;
347    }
348    mCameraHandle = camera_open(mCameraId);
349    if (!mCameraHandle) {
350        ALOGE("camera_open failed.");
351        return UNKNOWN_ERROR;
352    }
353
354    mCameraOpened = true;
355
356    return NO_ERROR;
357}
358
359/*===========================================================================
360 * FUNCTION   : closeCamera
361 *
362 * DESCRIPTION: close camera
363 *
364 * PARAMETERS : none
365 *
366 * RETURN     : int32_t type of status
367 *              NO_ERROR  -- success
368 *              none-zero failure code
369 *==========================================================================*/
370int QCamera3HardwareInterface::closeCamera()
371{
372    int rc = NO_ERROR;
373
374    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
375    mCameraHandle = NULL;
376    mCameraOpened = false;
377
378#ifdef HAS_MULTIMEDIA_HINTS
379    if (rc == NO_ERROR) {
380        if (m_pPowerModule) {
381            if (m_pPowerModule->powerHint) {
382                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
383                        (void *)"state=0");
384            }
385        }
386    }
387#endif
388
389    return rc;
390}
391
392/*===========================================================================
393 * FUNCTION   : initialize
394 *
395 * DESCRIPTION: Initialize frameworks callback functions
396 *
397 * PARAMETERS :
398 *   @callback_ops : callback function to frameworks
399 *
400 * RETURN     :
401 *
402 *==========================================================================*/
403int QCamera3HardwareInterface::initialize(
404        const struct camera3_callback_ops *callback_ops)
405{
406    int rc;
407
408    pthread_mutex_lock(&mMutex);
409
410    rc = initParameters();
411    if (rc < 0) {
412        ALOGE("%s: initParamters failed %d", __func__, rc);
413       goto err1;
414    }
415    mCallbackOps = callback_ops;
416
417    pthread_mutex_unlock(&mMutex);
418    mCameraInitialized = true;
419    return 0;
420
421err1:
422    pthread_mutex_unlock(&mMutex);
423    return rc;
424}
425
426/*===========================================================================
427 * FUNCTION   : configureStreams
428 *
429 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
430 *              and output streams.
431 *
432 * PARAMETERS :
433 *   @stream_list : streams to be configured
434 *
435 * RETURN     :
436 *
437 *==========================================================================*/
438int QCamera3HardwareInterface::configureStreams(
439        camera3_stream_configuration_t *streamList)
440{
441    int rc = 0;
442    mIsZslMode = false;
443
444    // Sanity check stream_list
445    if (streamList == NULL) {
446        ALOGE("%s: NULL stream configuration", __func__);
447        return BAD_VALUE;
448    }
449    if (streamList->streams == NULL) {
450        ALOGE("%s: NULL stream list", __func__);
451        return BAD_VALUE;
452    }
453
454    if (streamList->num_streams < 1) {
455        ALOGE("%s: Bad number of streams requested: %d", __func__,
456                streamList->num_streams);
457        return BAD_VALUE;
458    }
459
460    /* first invalidate all the steams in the mStreamList
461     * if they appear again, they will be validated */
462    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
463            it != mStreamInfo.end(); it++) {
464        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
465        channel->stop();
466        (*it)->status = INVALID;
467    }
468    if (mMetadataChannel) {
469        /* If content of mStreamInfo is not 0, there is metadata stream */
470        mMetadataChannel->stop();
471    }
472
473    pthread_mutex_lock(&mMutex);
474
475    camera3_stream_t *inputStream = NULL;
476    camera3_stream_t *jpegStream = NULL;
477    cam_stream_size_info_t stream_config_info;
478
479    for (size_t i = 0; i < streamList->num_streams; i++) {
480        camera3_stream_t *newStream = streamList->streams[i];
481        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
482                __func__, newStream->stream_type, newStream->format,
483                 newStream->width, newStream->height);
484        //if the stream is in the mStreamList validate it
485        bool stream_exists = false;
486        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
487                it != mStreamInfo.end(); it++) {
488            if ((*it)->stream == newStream) {
489                QCamera3Channel *channel =
490                    (QCamera3Channel*)(*it)->stream->priv;
491                stream_exists = true;
492                (*it)->status = RECONFIGURE;
493                /*delete the channel object associated with the stream because
494                  we need to reconfigure*/
495                delete channel;
496                (*it)->stream->priv = NULL;
497                (*it)->channel = NULL;
498            }
499        }
500        if (!stream_exists) {
501            //new stream
502            stream_info_t* stream_info;
503            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
504            stream_info->stream = newStream;
505            stream_info->status = VALID;
506            stream_info->registered = 0;
507            stream_info->channel = NULL;
508            mStreamInfo.push_back(stream_info);
509        }
510        if (newStream->stream_type == CAMERA3_STREAM_INPUT
511                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
512            if (inputStream != NULL) {
513                ALOGE("%s: Multiple input streams requested!", __func__);
514                pthread_mutex_unlock(&mMutex);
515                return BAD_VALUE;
516            }
517            inputStream = newStream;
518        }
519        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
520            jpegStream = newStream;
521        }
522    }
523    mInputStream = inputStream;
524
525    /*clean up invalid streams*/
526    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
527            it != mStreamInfo.end();) {
528        if(((*it)->status) == INVALID){
529            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
530            delete channel;
531            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
532            free(*it);
533            it = mStreamInfo.erase(it);
534        } else {
535            it++;
536        }
537    }
538    if (mMetadataChannel) {
539        delete mMetadataChannel;
540        mMetadataChannel = NULL;
541    }
542
543    //Create metadata channel and initialize it
544    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
545                    mCameraHandle->ops, captureResultCb,
546                    &gCamCapability[mCameraId]->padding_info, this);
547    if (mMetadataChannel == NULL) {
548        ALOGE("%s: failed to allocate metadata channel", __func__);
549        rc = -ENOMEM;
550        pthread_mutex_unlock(&mMutex);
551        return rc;
552    }
553    rc = mMetadataChannel->initialize();
554    if (rc < 0) {
555        ALOGE("%s: metadata channel initialization failed", __func__);
556        delete mMetadataChannel;
557        pthread_mutex_unlock(&mMutex);
558        return rc;
559    }
560
561    /* Allocate channel objects for the requested streams */
562    for (size_t i = 0; i < streamList->num_streams; i++) {
563        camera3_stream_t *newStream = streamList->streams[i];
564        uint32_t stream_usage = newStream->usage;
565        stream_config_info.stream_sizes[i].width = newStream->width;
566        stream_config_info.stream_sizes[i].height = newStream->height;
567        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
568            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
569            //for zsl stream the size is jpeg size
570            stream_config_info.stream_sizes[i].width = jpegStream->width;
571            stream_config_info.stream_sizes[i].height = jpegStream->height;
572            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
573        } else {
574           //for non zsl streams find out the format
575           switch (newStream->format) {
576           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
577              {
578                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
579                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
580                 } else {
581                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
582                 }
583              }
584              break;
585           case HAL_PIXEL_FORMAT_YCbCr_420_888:
586              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
587              break;
588           case HAL_PIXEL_FORMAT_BLOB:
589              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
590              break;
591           default:
592              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
593              break;
594           }
595        }
596        if (newStream->priv == NULL) {
597            //New stream, construct channel
598            switch (newStream->stream_type) {
599            case CAMERA3_STREAM_INPUT:
600                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
601                break;
602            case CAMERA3_STREAM_BIDIRECTIONAL:
603                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
604                    GRALLOC_USAGE_HW_CAMERA_WRITE;
605                break;
606            case CAMERA3_STREAM_OUTPUT:
607                /* For video encoding stream, set read/write rarely
608                 * flag so that they may be set to un-cached */
609                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
610                    newStream->usage =
611                         (GRALLOC_USAGE_SW_READ_RARELY |
612                         GRALLOC_USAGE_SW_WRITE_RARELY |
613                         GRALLOC_USAGE_HW_CAMERA_WRITE);
614                else
615                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
616                break;
617            default:
618                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
619                break;
620            }
621
622            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
623                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
624                QCamera3Channel *channel;
625                switch (newStream->format) {
626                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
627                case HAL_PIXEL_FORMAT_YCbCr_420_888:
628                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
629                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
630                        jpegStream) {
631                        uint32_t width = jpegStream->width;
632                        uint32_t height = jpegStream->height;
633                        mIsZslMode = true;
634                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
635                            mCameraHandle->ops, captureResultCb,
636                            &gCamCapability[mCameraId]->padding_info, this, newStream,
637                            width, height);
638                    } else
639                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
640                            mCameraHandle->ops, captureResultCb,
641                            &gCamCapability[mCameraId]->padding_info, this, newStream);
642                    if (channel == NULL) {
643                        ALOGE("%s: allocation of channel failed", __func__);
644                        pthread_mutex_unlock(&mMutex);
645                        return -ENOMEM;
646                    }
647
648                    newStream->priv = channel;
649                    break;
650                case HAL_PIXEL_FORMAT_BLOB:
651                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
652                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
653                            mCameraHandle->ops, captureResultCb,
654                            &gCamCapability[mCameraId]->padding_info, this, newStream);
655                    if (mPictureChannel == NULL) {
656                        ALOGE("%s: allocation of channel failed", __func__);
657                        pthread_mutex_unlock(&mMutex);
658                        return -ENOMEM;
659                    }
660                    newStream->priv = (QCamera3Channel*)mPictureChannel;
661                    break;
662
663                //TODO: Add support for app consumed format?
664                default:
665                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
666                    break;
667                }
668            }
669
670            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
671                    it != mStreamInfo.end(); it++) {
672                if ((*it)->stream == newStream) {
673                    (*it)->channel = (QCamera3Channel*) newStream->priv;
674                    break;
675                }
676            }
677        } else {
678            // Channel already exists for this stream
679            // Do nothing for now
680        }
681    }
682
683    int32_t hal_version = CAM_HAL_V3;
684    stream_config_info.num_streams = streamList->num_streams;
685
686    // settings/parameters don't carry over for new configureStreams
687    memset(mParameters, 0, sizeof(parm_buffer_t));
688
689    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
690    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
691                sizeof(hal_version), &hal_version);
692
693    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
694                sizeof(stream_config_info), &stream_config_info);
695
696    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
697
698    /*For the streams to be reconfigured we need to register the buffers
699      since the framework wont*/
700    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
701            it != mStreamInfo.end(); it++) {
702        if ((*it)->status == RECONFIGURE) {
703            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
704            /*only register buffers for streams that have already been
705              registered*/
706            if ((*it)->registered) {
707                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
708                        (*it)->buffer_set.buffers);
709                if (rc != NO_ERROR) {
710                    ALOGE("%s: Failed to register the buffers of old stream,\
711                            rc = %d", __func__, rc);
712                }
713                ALOGV("%s: channel %p has %d buffers",
714                        __func__, channel, (*it)->buffer_set.num_buffers);
715            }
716        }
717
718        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
719        if (index == NAME_NOT_FOUND) {
720            mPendingBuffersMap.add((*it)->stream, 0);
721        } else {
722            mPendingBuffersMap.editValueAt(index) = 0;
723        }
724    }
725
726    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
727    mPendingRequestsList.clear();
728
729    mPendingFrameDropList.clear();
730
731    /*flush the metadata list*/
732    if (!mStoredMetadataList.empty()) {
733        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
734              m != mStoredMetadataList.end(); m++) {
735            mMetadataChannel->bufDone(m->meta_buf);
736            free(m->meta_buf);
737            m = mStoredMetadataList.erase(m);
738        }
739    }
740
741    mFirstRequest = true;
742
743    //Get min frame duration for this streams configuration
744    deriveMinFrameDuration();
745
746    pthread_mutex_unlock(&mMutex);
747    return rc;
748}
749
750/*===========================================================================
751 * FUNCTION   : validateCaptureRequest
752 *
753 * DESCRIPTION: validate a capture request from camera service
754 *
755 * PARAMETERS :
756 *   @request : request from framework to process
757 *
758 * RETURN     :
759 *
760 *==========================================================================*/
761int QCamera3HardwareInterface::validateCaptureRequest(
762                    camera3_capture_request_t *request)
763{
764    ssize_t idx = 0;
765    const camera3_stream_buffer_t *b;
766    CameraMetadata meta;
767
768    /* Sanity check the request */
769    if (request == NULL) {
770        ALOGE("%s: NULL capture request", __func__);
771        return BAD_VALUE;
772    }
773
774    uint32_t frameNumber = request->frame_number;
775    if (request->input_buffer != NULL &&
776            request->input_buffer->stream != mInputStream) {
777        ALOGE("%s: Request %d: Input buffer not from input stream!",
778                __FUNCTION__, frameNumber);
779        return BAD_VALUE;
780    }
781    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
782        ALOGE("%s: Request %d: No output buffers provided!",
783                __FUNCTION__, frameNumber);
784        return BAD_VALUE;
785    }
786    if (request->input_buffer != NULL) {
787        b = request->input_buffer;
788        QCamera3Channel *channel =
789            static_cast<QCamera3Channel*>(b->stream->priv);
790        if (channel == NULL) {
791            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
792                    __func__, frameNumber, idx);
793            return BAD_VALUE;
794        }
795        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
796            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
797                    __func__, frameNumber, idx);
798            return BAD_VALUE;
799        }
800        if (b->release_fence != -1) {
801            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
802                    __func__, frameNumber, idx);
803            return BAD_VALUE;
804        }
805        if (b->buffer == NULL) {
806            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
807                    __func__, frameNumber, idx);
808            return BAD_VALUE;
809        }
810    }
811
812    // Validate all buffers
813    b = request->output_buffers;
814    do {
815        QCamera3Channel *channel =
816                static_cast<QCamera3Channel*>(b->stream->priv);
817        if (channel == NULL) {
818            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
819                    __func__, frameNumber, idx);
820            return BAD_VALUE;
821        }
822        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
823            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
824                    __func__, frameNumber, idx);
825            return BAD_VALUE;
826        }
827        if (b->release_fence != -1) {
828            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
829                    __func__, frameNumber, idx);
830            return BAD_VALUE;
831        }
832        if (b->buffer == NULL) {
833            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
834                    __func__, frameNumber, idx);
835            return BAD_VALUE;
836        }
837        idx++;
838        b = request->output_buffers + idx;
839    } while (idx < (ssize_t)request->num_output_buffers);
840
841    return NO_ERROR;
842}
843
844/*===========================================================================
845 * FUNCTION   : deriveMinFrameDuration
846 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
848 *              on currently configured streams.
849 *
850 * PARAMETERS : NONE
851 *
852 * RETURN     : NONE
853 *
854 *==========================================================================*/
855void QCamera3HardwareInterface::deriveMinFrameDuration()
856{
857    int32_t maxJpegDimension, maxProcessedDimension;
858
859    maxJpegDimension = 0;
860    maxProcessedDimension = 0;
861
862    // Figure out maximum jpeg, processed, and raw dimensions
863    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
864        it != mStreamInfo.end(); it++) {
865
866        // Input stream doesn't have valid stream_type
867        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
868            continue;
869
870        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
871        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
872            if (dimension > maxJpegDimension)
873                maxJpegDimension = dimension;
874        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
875            if (dimension > maxProcessedDimension)
876                maxProcessedDimension = dimension;
877        }
878    }
879
880    //Assume all jpeg dimensions are in processed dimensions.
881    if (maxJpegDimension > maxProcessedDimension)
882        maxProcessedDimension = maxJpegDimension;
883
884    //Find minimum durations for processed, jpeg, and raw
885    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
886    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
887        if (maxProcessedDimension ==
888            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
889            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
890            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
891            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
892            break;
893        }
894    }
895}
896
897/*===========================================================================
898 * FUNCTION   : getMinFrameDuration
899 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
906 *
907 *==========================================================================*/
908int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
909{
910    bool hasJpegStream = false;
911    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
912        const camera3_stream_t *stream = request->output_buffers[i].stream;
913        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
914            hasJpegStream = true;
915    }
916
917    if (!hasJpegStream)
918        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
919    else
920        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
921}
922
923/*===========================================================================
924 * FUNCTION   : registerStreamBuffers
925 *
926 * DESCRIPTION: Register buffers for a given stream with the HAL device.
927 *
928 * PARAMETERS :
 *   @buffer_set : set of buffers to be registered with the stream
930 *
931 * RETURN     :
932 *
933 *==========================================================================*/
934int QCamera3HardwareInterface::registerStreamBuffers(
935        const camera3_stream_buffer_set_t *buffer_set)
936{
937    int rc = 0;
938
939    pthread_mutex_lock(&mMutex);
940
941    if (buffer_set == NULL) {
942        ALOGE("%s: Invalid buffer_set parameter.", __func__);
943        pthread_mutex_unlock(&mMutex);
944        return -EINVAL;
945    }
946    if (buffer_set->stream == NULL) {
947        ALOGE("%s: Invalid stream parameter.", __func__);
948        pthread_mutex_unlock(&mMutex);
949        return -EINVAL;
950    }
951    if (buffer_set->num_buffers < 1) {
952        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
953        pthread_mutex_unlock(&mMutex);
954        return -EINVAL;
955    }
956    if (buffer_set->buffers == NULL) {
957        ALOGE("%s: Invalid buffers parameter.", __func__);
958        pthread_mutex_unlock(&mMutex);
959        return -EINVAL;
960    }
961
962    camera3_stream_t *stream = buffer_set->stream;
963    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
964
965    //set the buffer_set in the mStreamInfo array
966    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
967            it != mStreamInfo.end(); it++) {
968        if ((*it)->stream == stream) {
969            uint32_t numBuffers = buffer_set->num_buffers;
970            (*it)->buffer_set.stream = buffer_set->stream;
971            (*it)->buffer_set.num_buffers = numBuffers;
972            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
973            if ((*it)->buffer_set.buffers == NULL) {
974                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
975                pthread_mutex_unlock(&mMutex);
976                return -ENOMEM;
977            }
978            for (size_t j = 0; j < numBuffers; j++){
979                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
980            }
981            (*it)->registered = 1;
982        }
983    }
984    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
985    if (rc < 0) {
986        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
987        pthread_mutex_unlock(&mMutex);
988        return -ENODEV;
989    }
990
991    pthread_mutex_unlock(&mMutex);
992    return NO_ERROR;
993}
994
995/*===========================================================================
996 * FUNCTION   : processCaptureRequest
997 *
998 * DESCRIPTION: process a capture request from camera service
999 *
1000 * PARAMETERS :
1001 *   @request : request from framework to process
1002 *
1003 * RETURN     :
1004 *
1005 *==========================================================================*/
1006int QCamera3HardwareInterface::processCaptureRequest(
1007                    camera3_capture_request_t *request)
1008{
1009    int rc = NO_ERROR;
1010    int32_t request_id;
1011    CameraMetadata meta;
1012    MetadataBufferInfo reproc_meta;
1013    int queueMetadata = 0;
1014
1015    pthread_mutex_lock(&mMutex);
1016
1017    rc = validateCaptureRequest(request);
1018    if (rc != NO_ERROR) {
1019        ALOGE("%s: incoming request is not valid", __func__);
1020        pthread_mutex_unlock(&mMutex);
1021        return rc;
1022    }
1023
1024    meta = request->settings;
1025
1026    // For first capture request, send capture intent, and
1027    // stream on all streams
1028    if (mFirstRequest) {
1029
1030        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1031            int32_t hal_version = CAM_HAL_V3;
1032            uint8_t captureIntent =
1033                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1034
1035            memset(mParameters, 0, sizeof(parm_buffer_t));
1036            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1037            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1038                sizeof(hal_version), &hal_version);
1039            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1040                sizeof(captureIntent), &captureIntent);
1041            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1042                mParameters);
1043        }
1044
1045        mMetadataChannel->start();
1046        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1047            it != mStreamInfo.end(); it++) {
1048            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1049            channel->start();
1050        }
1051    }
1052
1053    uint32_t frameNumber = request->frame_number;
1054    uint32_t streamTypeMask = 0;
1055
1056    if (meta.exists(ANDROID_REQUEST_ID)) {
1057        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1058        mCurrentRequestId = request_id;
1059        ALOGV("%s: Received request with id: %d",__func__, request_id);
1060    } else if (mFirstRequest || mCurrentRequestId == -1){
1061        ALOGE("%s: Unable to find request id field, \
1062                & no previous id available", __func__);
1063        return NAME_NOT_FOUND;
1064    } else {
1065        ALOGV("%s: Re-using old request id", __func__);
1066        request_id = mCurrentRequestId;
1067    }
1068
1069    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1070                                    __func__, __LINE__,
1071                                    request->num_output_buffers,
1072                                    request->input_buffer,
1073                                    frameNumber);
1074    // Acquire all request buffers first
1075    int blob_request = 0;
1076    for (size_t i = 0; i < request->num_output_buffers; i++) {
1077        const camera3_stream_buffer_t& output = request->output_buffers[i];
1078        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1079        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1080
1081        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1082        //Call function to store local copy of jpeg data for encode params.
1083            blob_request = 1;
1084            rc = getJpegSettings(request->settings);
1085            if (rc < 0) {
1086                ALOGE("%s: failed to get jpeg parameters", __func__);
1087                pthread_mutex_unlock(&mMutex);
1088                return rc;
1089            }
1090        }
1091
1092        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1093        if (rc != OK) {
1094            ALOGE("%s: fence wait failed %d", __func__, rc);
1095            pthread_mutex_unlock(&mMutex);
1096            return rc;
1097        }
1098        streamTypeMask |= channel->getStreamTypeMask();
1099    }
1100
1101    rc = setFrameParameters(request, streamTypeMask);
1102    if (rc < 0) {
1103        ALOGE("%s: fail to set frame parameters", __func__);
1104        pthread_mutex_unlock(&mMutex);
1105        return rc;
1106    }
1107
1108    /* Update pending request list and pending buffers map */
1109    PendingRequestInfo pendingRequest;
1110    pendingRequest.frame_number = frameNumber;
1111    pendingRequest.num_buffers = request->num_output_buffers;
1112    pendingRequest.request_id = request_id;
1113    pendingRequest.blob_request = blob_request;
1114    if (blob_request)
1115        pendingRequest.input_jpeg_settings = *mJpegSettings;
1116    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1117
1118    for (size_t i = 0; i < request->num_output_buffers; i++) {
1119        RequestedBufferInfo requestedBuf;
1120        requestedBuf.stream = request->output_buffers[i].stream;
1121        requestedBuf.buffer = NULL;
1122        pendingRequest.buffers.push_back(requestedBuf);
1123
1124        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1125    }
1126    mPendingRequestsList.push_back(pendingRequest);
1127
1128    // Notify metadata channel we receive a request
1129    mMetadataChannel->request(NULL, frameNumber);
1130
1131    // Call request on other streams
1132    for (size_t i = 0; i < request->num_output_buffers; i++) {
1133        const camera3_stream_buffer_t& output = request->output_buffers[i];
1134        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1135        mm_camera_buf_def_t *pInputBuffer = NULL;
1136
1137        if (channel == NULL) {
1138            ALOGE("%s: invalid channel pointer for stream", __func__);
1139            continue;
1140        }
1141
1142        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1143            QCamera3RegularChannel* inputChannel = NULL;
1144            if(request->input_buffer != NULL){
1145                //Try to get the internal format
1146                inputChannel = (QCamera3RegularChannel*)
1147                    request->input_buffer->stream->priv;
1148                if(inputChannel == NULL ){
1149                    ALOGE("%s: failed to get input channel handle", __func__);
1150                } else {
1151                    pInputBuffer =
1152                        inputChannel->getInternalFormatBuffer(
1153                                request->input_buffer->buffer);
1154                    ALOGD("%s: Input buffer dump",__func__);
1155                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1156                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1157                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1158                    ALOGD("Handle:%p", request->input_buffer->buffer);
1159                    //TODO: need to get corresponding metadata and send it to pproc
1160                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1161                         m != mStoredMetadataList.end(); m++) {
1162                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1163                            reproc_meta.meta_buf = m->meta_buf;
1164                            queueMetadata = 1;
1165                            break;
1166                        }
1167                    }
1168                }
1169            }
1170            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1171                            pInputBuffer,(QCamera3Channel*)inputChannel);
1172            if (queueMetadata) {
1173                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1174            }
1175        } else {
1176            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1177                __LINE__, output.buffer, frameNumber);
1178            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1179                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1180                     m != mStoredMetadataList.end(); m++) {
1181                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1182                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1183                            mMetadataChannel->bufDone(m->meta_buf);
1184                            free(m->meta_buf);
1185                            m = mStoredMetadataList.erase(m);
1186                            break;
1187                        }
1188                   }
1189                }
1190            }
1191            rc = channel->request(output.buffer, frameNumber);
1192        }
1193        if (rc < 0)
1194            ALOGE("%s: request failed", __func__);
1195    }
1196
1197    mFirstRequest = false;
1198    // Added a timed condition wait
1199    struct timespec ts;
1200    uint8_t isValidTimeout = 1;
1201    rc = clock_gettime(CLOCK_REALTIME, &ts);
1202    if (rc < 0) {
1203        isValidTimeout = 0;
1204        ALOGE("%s: Error reading the real time clock!!", __func__);
1205    }
1206    else {
1207        // Make timeout as 5 sec for request to be honored
1208        ts.tv_sec += 5;
1209    }
1210    //Block on conditional variable
1211    mPendingRequest = 1;
1212    while (mPendingRequest == 1) {
1213        if (!isValidTimeout) {
1214            ALOGV("%s: Blocking on conditional wait", __func__);
1215            pthread_cond_wait(&mRequestCond, &mMutex);
1216        }
1217        else {
1218            ALOGV("%s: Blocking on timed conditional wait", __func__);
1219            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1220            if (rc == ETIMEDOUT) {
1221                rc = -ENODEV;
1222                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1223                break;
1224            }
1225        }
1226        ALOGV("%s: Unblocked", __func__);
1227    }
1228
1229    pthread_mutex_unlock(&mMutex);
1230
1231    return rc;
1232}
1233
1234/*===========================================================================
1235 * FUNCTION   : getMetadataVendorTagOps
1236 *
1237 * DESCRIPTION:
1238 *
1239 * PARAMETERS :
1240 *
1241 *
1242 * RETURN     :
1243 *==========================================================================*/
1244void QCamera3HardwareInterface::getMetadataVendorTagOps(
1245                    vendor_tag_query_ops_t* /*ops*/)
1246{
1247    /* Enable locks when we eventually add Vendor Tags */
1248    /*
1249    pthread_mutex_lock(&mMutex);
1250
1251    pthread_mutex_unlock(&mMutex);
1252    */
1253    return;
1254}
1255
1256/*===========================================================================
1257 * FUNCTION   : dump
1258 *
1259 * DESCRIPTION:
1260 *
1261 * PARAMETERS :
1262 *
1263 *
1264 * RETURN     :
1265 *==========================================================================*/
1266void QCamera3HardwareInterface::dump(int /*fd*/)
1267{
1268    /*Enable lock when we implement this function*/
1269    /*
1270    pthread_mutex_lock(&mMutex);
1271
1272    pthread_mutex_unlock(&mMutex);
1273    */
1274    return;
1275}
1276
1277/*===========================================================================
1278 * FUNCTION   : flush
1279 *
1280 * DESCRIPTION:
1281 *
1282 * PARAMETERS :
1283 *
1284 *
1285 * RETURN     :
1286 *==========================================================================*/
1287int QCamera3HardwareInterface::flush()
1288{
1289    /*Enable lock when we implement this function*/
1290    /*
1291    pthread_mutex_lock(&mMutex);
1292
1293    pthread_mutex_unlock(&mMutex);
1294    */
1295    return 0;
1296}
1297
1298/*===========================================================================
1299 * FUNCTION   : captureResultCb
1300 *
1301 * DESCRIPTION: Callback handler for all capture result
1302 *              (streams, as well as metadata)
1303 *
1304 * PARAMETERS :
1305 *   @metadata : metadata information
1306 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1307 *               NULL if metadata.
1308 *
1309 * RETURN     : NONE
1310 *==========================================================================*/
1311void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1312                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1313{
1314    pthread_mutex_lock(&mMutex);
1315
1316    if (metadata_buf) {
1317        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1318        int32_t frame_number_valid = *(int32_t *)
1319            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1320        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1321            CAM_INTF_META_PENDING_REQUESTS, metadata);
1322        uint32_t frame_number = *(uint32_t *)
1323            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1324        const struct timeval *tv = (const struct timeval *)
1325            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1326        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1327            tv->tv_usec * NSEC_PER_USEC;
1328        cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
1329            POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
1330
1331        if (!frame_number_valid) {
1332            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1333            mMetadataChannel->bufDone(metadata_buf);
1334            free(metadata_buf);
1335            goto done_metadata;
1336        }
1337        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1338                frame_number, capture_time);
1339
1340        // Go through the pending requests info and send shutter/results to frameworks
1341        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1342                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1343            camera3_capture_result_t result;
1344            camera3_notify_msg_t notify_msg;
1345            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1346
1347            // Flush out all entries with less or equal frame numbers.
1348
1349            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1350            //Right now it's the same as metadata timestamp
1351
1352            //TODO: When there is metadata drop, how do we derive the timestamp of
1353            //dropped frames? For now, we fake the dropped timestamp by substracting
1354            //from the reported timestamp
1355            nsecs_t current_capture_time = capture_time -
1356                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1357
1358            // Send shutter notify to frameworks
1359            notify_msg.type = CAMERA3_MSG_SHUTTER;
1360            notify_msg.message.shutter.frame_number = i->frame_number;
1361            notify_msg.message.shutter.timestamp = current_capture_time;
1362            mCallbackOps->notify(mCallbackOps, &notify_msg);
1363            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1364                    i->frame_number, capture_time);
1365
1366            // Check whether any stream buffer corresponding to this is dropped or not
1367            // If dropped, then send the ERROR_BUFFER for the corresponding stream
1368            if (cam_frame_drop.frame_dropped) {
1369                camera3_notify_msg_t notify_msg;
1370                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1371                        j != i->buffers.end(); j++) {
1372                    QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1373                    uint32_t streamTypeMask = channel->getStreamTypeMask();
1374                    if (streamTypeMask & cam_frame_drop.stream_type_mask) {
1375                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
1376                        ALOGV("%s: Start of reporting error frame#=%d, streamMask=%d",
1377                               __func__, i->frame_number, streamTypeMask);
1378                        notify_msg.type = CAMERA3_MSG_ERROR;
1379                        notify_msg.message.error.frame_number = i->frame_number;
1380                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
1381                        notify_msg.message.error.error_stream = j->stream;
1382                        mCallbackOps->notify(mCallbackOps, &notify_msg);
1383                        ALOGV("%s: End of reporting error frame#=%d, streamMask=%d",
1384                               __func__, i->frame_number, streamTypeMask);
1385                        PendingFrameDropInfo PendingFrameDrop;
1386                        PendingFrameDrop.frame_number=i->frame_number;
1387                        PendingFrameDrop.stream_type_mask = cam_frame_drop.stream_type_mask;
1388                        // Add the Frame drop info to mPendingFrameDropList
1389                        mPendingFrameDropList.push_back(PendingFrameDrop);
1390                    }
1391                }
1392            }
1393
1394            // Send empty metadata with already filled buffers for dropped metadata
1395            // and send valid metadata with already filled buffers for current metadata
1396            if (i->frame_number < frame_number) {
1397                CameraMetadata dummyMetadata;
1398                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1399                        &current_capture_time, 1);
1400                dummyMetadata.update(ANDROID_REQUEST_ID,
1401                        &(i->request_id), 1);
1402                result.result = dummyMetadata.release();
1403            } else {
1404                result.result = translateCbMetadataToResultMetadata(metadata,
1405                        current_capture_time, i->request_id, i->blob_request,
1406                        &(i->input_jpeg_settings));
1407                if (mIsZslMode) {
1408                   int found_metadata = 0;
1409                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1410                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1411                        j != i->buffers.end(); j++) {
1412                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1413                         //check if corresp. zsl already exists in the stored metadata list
1414                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1415                               m != mStoredMetadataList.begin(); m++) {
1416                            if (m->frame_number == frame_number) {
1417                               m->meta_buf = metadata_buf;
1418                               found_metadata = 1;
1419                               break;
1420                            }
1421                         }
1422                         if (!found_metadata) {
1423                            MetadataBufferInfo store_meta_info;
1424                            store_meta_info.meta_buf = metadata_buf;
1425                            store_meta_info.frame_number = frame_number;
1426                            mStoredMetadataList.push_back(store_meta_info);
1427                            found_metadata = 1;
1428                         }
1429                      }
1430                   }
1431                   if (!found_metadata) {
1432                       if (!i->input_buffer_present && i->blob_request) {
1433                          //livesnapshot or fallback non-zsl snapshot case
1434                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1435                                j != i->buffers.end(); j++){
1436                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1437                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1438                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1439                                 break;
1440                              }
1441                         }
1442                       } else {
1443                            //return the metadata immediately
1444                            mMetadataChannel->bufDone(metadata_buf);
1445                            free(metadata_buf);
1446                       }
1447                   }
1448               } else if (!mIsZslMode && i->blob_request) {
1449                   //If it is a blob request then send the metadata to the picture channel
1450                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1451               } else {
1452                   // Return metadata buffer
1453                   mMetadataChannel->bufDone(metadata_buf);
1454                   free(metadata_buf);
1455               }
1456
1457            }
1458            if (!result.result) {
1459                ALOGE("%s: metadata is NULL", __func__);
1460            }
1461            result.frame_number = i->frame_number;
1462            result.num_output_buffers = 0;
1463            result.output_buffers = NULL;
1464            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1465                    j != i->buffers.end(); j++) {
1466                if (j->buffer) {
1467                    result.num_output_buffers++;
1468                }
1469            }
1470
1471            if (result.num_output_buffers > 0) {
1472                camera3_stream_buffer_t *result_buffers =
1473                    new camera3_stream_buffer_t[result.num_output_buffers];
1474                if (!result_buffers) {
1475                    ALOGE("%s: Fatal error: out of memory", __func__);
1476                }
1477                size_t result_buffers_idx = 0;
1478                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1479                     j != i->buffers.end(); j++) {
1480                     if (j->buffer) {
1481                         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1482                              m != mPendingFrameDropList.end(); m++) {
1483                              QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
1484                              uint32_t streamTypeMask = channel->getStreamTypeMask();
1485                              if((m->stream_type_mask & streamTypeMask) &&
1486                                  (m->frame_number==frame_number)) {
1487                                  j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1488                                  ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
1489                                        __func__, frame_number, streamTypeMask);
1490                                  m = mPendingFrameDropList.erase(m);
1491                                  break;
1492                              }
1493                         }
1494                         result_buffers[result_buffers_idx++] = *(j->buffer);
1495                         free(j->buffer);
1496                         j->buffer = NULL;
1497                         mPendingBuffersMap.editValueFor(j->stream)--;
1498                    }
1499                }
1500                result.output_buffers = result_buffers;
1501
1502                mCallbackOps->process_capture_result(mCallbackOps, &result);
1503                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1504                        __func__, result.frame_number, current_capture_time);
1505                free_camera_metadata((camera_metadata_t *)result.result);
1506                delete[] result_buffers;
1507            } else {
1508                mCallbackOps->process_capture_result(mCallbackOps, &result);
1509                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1510                        __func__, result.frame_number, current_capture_time);
1511                free_camera_metadata((camera_metadata_t *)result.result);
1512            }
1513            // erase the element from the list
1514            i = mPendingRequestsList.erase(i);
1515        }
1516
1517
1518done_metadata:
1519        bool max_buffers_dequeued = false;
1520        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1521            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1522            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1523            if (queued_buffers == stream->max_buffers) {
1524                max_buffers_dequeued = true;
1525                break;
1526            }
1527        }
1528        if (!max_buffers_dequeued && !pending_requests) {
1529            // Unblock process_capture_request
1530            mPendingRequest = 0;
1531            pthread_cond_signal(&mRequestCond);
1532        }
1533    } else {
1534        // If the frame number doesn't exist in the pending request list,
1535        // directly send the buffer to the frameworks, and update pending buffers map
1536        // Otherwise, book-keep the buffer.
1537        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1538        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1539            i++;
1540        }
1541        if (i == mPendingRequestsList.end()) {
1542            // Verify all pending requests frame_numbers are greater
1543            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1544                    j != mPendingRequestsList.end(); j++) {
1545                if (j->frame_number < frame_number) {
1546                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1547                            __func__, j->frame_number, frame_number);
1548                }
1549            }
1550            camera3_capture_result_t result;
1551            result.result = NULL;
1552            result.frame_number = frame_number;
1553            result.num_output_buffers = 1;
1554            for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1555                  m != mPendingFrameDropList.end(); m++) {
1556                QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
1557                uint32_t streamTypeMask = channel->getStreamTypeMask();
1558                if((m->stream_type_mask & streamTypeMask) &&
1559                    (m->frame_number==frame_number) ) {
1560                    buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1561                    ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
1562                            __func__, frame_number, streamTypeMask);
1563                    m = mPendingFrameDropList.erase(m);
1564                    break;
1565                }
1566            }
1567            result.output_buffers = buffer;
1568            ALOGV("%s: result frame_number = %d, buffer = %p",
1569                    __func__, frame_number, buffer);
1570            mPendingBuffersMap.editValueFor(buffer->stream)--;
1571            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1572                int found = 0;
1573                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1574                      k != mStoredMetadataList.end(); k++) {
1575                    if (k->frame_number == frame_number) {
1576                        k->zsl_buf_hdl = buffer->buffer;
1577                        found = 1;
1578                        break;
1579                    }
1580                }
1581                if (!found) {
1582                   MetadataBufferInfo meta_info;
1583                   meta_info.frame_number = frame_number;
1584                   meta_info.zsl_buf_hdl = buffer->buffer;
1585                   mStoredMetadataList.push_back(meta_info);
1586                }
1587            }
1588            mCallbackOps->process_capture_result(mCallbackOps, &result);
1589        } else {
1590            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1591                    j != i->buffers.end(); j++) {
1592                if (j->stream == buffer->stream) {
1593                    if (j->buffer != NULL) {
1594                        ALOGE("%s: Error: buffer is already set", __func__);
1595                    } else {
1596                        j->buffer = (camera3_stream_buffer_t *)malloc(
1597                                sizeof(camera3_stream_buffer_t));
1598                        *(j->buffer) = *buffer;
1599                        ALOGV("%s: cache buffer %p at result frame_number %d",
1600                                __func__, buffer, frame_number);
1601                    }
1602                }
1603            }
1604        }
1605    }
1606    pthread_mutex_unlock(&mMutex);
1607    return;
1608}
1609
1610/*===========================================================================
1611 * FUNCTION   : translateCbMetadataToResultMetadata
1612 *
1613 * DESCRIPTION:
1614 *
1615 * PARAMETERS :
1616 *   @metadata : metadata information from callback
1617 *
1618 * RETURN     : camera_metadata_t*
1619 *              metadata in a format specified by fwk
1620 *==========================================================================*/
1621camera_metadata_t*
1622QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1623                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1624                                 int32_t request_id, int32_t BlobRequest,
1625                                 jpeg_settings_t* inputjpegsettings)
1626{
1627    CameraMetadata camMetadata;
1628    camera_metadata_t* resultMetadata;
1629
1630    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1631    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1632
1633    // Update the JPEG related info
1634    if (BlobRequest) {
1635        camMetadata.update(ANDROID_JPEG_ORIENTATION, &(inputjpegsettings->jpeg_orientation), 1);
1636        camMetadata.update(ANDROID_JPEG_QUALITY, &(inputjpegsettings->jpeg_quality), 1);
1637
1638        int32_t thumbnailSizeTable[2];
1639        thumbnailSizeTable[0] = inputjpegsettings->thumbnail_size.width;
1640        thumbnailSizeTable[1] = inputjpegsettings->thumbnail_size.height;
1641        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSizeTable, 2);
1642        ALOGV("%s: Orien=%d, quality=%d wid=%d, height=%d", __func__, inputjpegsettings->jpeg_orientation,
1643               inputjpegsettings->jpeg_quality,thumbnailSizeTable[0], thumbnailSizeTable[1]);
1644
1645        if (inputjpegsettings->gps_coordinates[0]) {
1646            double gpsCoordinates[3];
1647            gpsCoordinates[0]=*(inputjpegsettings->gps_coordinates[0]);
1648            gpsCoordinates[1]=*(inputjpegsettings->gps_coordinates[1]);
1649            gpsCoordinates[2]=*(inputjpegsettings->gps_coordinates[2]);
1650            camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
1651            ALOGV("%s: gpsCoordinates[0]=%f, 1=%f 2=%f", __func__, gpsCoordinates[0],
1652                 gpsCoordinates[1],gpsCoordinates[2]);
1653        }
1654
1655        if (inputjpegsettings->gps_timestamp) {
1656            camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, inputjpegsettings->gps_timestamp, 1);
1657            ALOGV("%s: gps_timestamp=%lld", __func__, *(inputjpegsettings->gps_timestamp));
1658        }
1659
1660        String8 str(inputjpegsettings->gps_processing_method);
1661        if (strlen(mJpegSettings->gps_processing_method) > 0) {
1662            camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
1663        }
1664    }
1665    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1666    uint8_t next_entry;
1667    while (curr_entry != CAM_INTF_PARM_MAX) {
1668       switch (curr_entry) {
1669         case CAM_INTF_META_FACE_DETECTION:{
1670             cam_face_detection_data_t *faceDetectionInfo =
1671                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1672             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1673             int32_t faceIds[numFaces];
1674             uint8_t faceScores[numFaces];
1675             int32_t faceRectangles[numFaces * 4];
1676             int32_t faceLandmarks[numFaces * 6];
1677             int j = 0, k = 0;
1678             for (int i = 0; i < numFaces; i++) {
1679                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1680                 faceScores[i] = faceDetectionInfo->faces[i].score;
1681                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1682                         faceRectangles+j, -1);
1683                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1684                 j+= 4;
1685                 k+= 6;
1686             }
1687             if (numFaces > 0) {
1688                 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1689                 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1690                 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1691                     faceRectangles, numFaces*4);
1692                 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1693                     faceLandmarks, numFaces*6);
1694             }
1695            break;
1696            }
1697         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1698             uint8_t  *color_correct_mode =
1699                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1700             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1701             break;
1702          }
1703         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1704             int32_t  *ae_precapture_id =
1705                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1706             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1707             break;
1708          }
1709         case CAM_INTF_META_AEC_ROI: {
1710            cam_area_t  *hAeRegions =
1711                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1712             int32_t aeRegions[5];
1713             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1714             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1715             break;
1716          }
1717          case CAM_INTF_META_AEC_STATE:{
1718             uint8_t *ae_state =
1719                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1720             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1721             break;
1722          }
1723          case CAM_INTF_PARM_FOCUS_MODE:{
1724             uint8_t  *focusMode =
1725                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1726             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1727                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1728             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1729             break;
1730          }
1731          case CAM_INTF_META_AF_ROI:{
1732             /*af regions*/
1733             cam_area_t  *hAfRegions =
1734                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1735             int32_t afRegions[5];
1736             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1737             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1738             break;
1739          }
1740          case CAM_INTF_META_AF_STATE: {
1741             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1742             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1743             break;
1744          }
1745          case CAM_INTF_META_AF_TRIGGER_ID: {
1746             int32_t  *afTriggerId =
1747                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1748             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1749             break;
1750          }
1751          case CAM_INTF_PARM_WHITE_BALANCE: {
1752               uint8_t  *whiteBalance =
1753                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1754               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1755                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1756                   *whiteBalance);
1757               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1758               break;
1759          }
1760          case CAM_INTF_META_AWB_REGIONS: {
1761             /*awb regions*/
1762             cam_area_t  *hAwbRegions =
1763                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1764             int32_t awbRegions[5];
1765             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1766             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1767             break;
1768          }
1769          case CAM_INTF_META_AWB_STATE: {
1770             uint8_t  *whiteBalanceState =
1771                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1772             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1773             break;
1774          }
1775          case CAM_INTF_META_MODE: {
1776             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1777             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1778             break;
1779          }
1780          case CAM_INTF_META_EDGE_MODE: {
1781             uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1782             camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1783             break;
1784          }
1785          case CAM_INTF_META_FLASH_POWER: {
1786             uint8_t  *flashPower =
1787                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1788             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1789             break;
1790          }
1791          case CAM_INTF_META_FLASH_FIRING_TIME: {
1792             int64_t  *flashFiringTime =
1793                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1794             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1795             break;
1796          }
1797          case CAM_INTF_META_FLASH_STATE: {
1798             uint8_t  *flashState =
1799                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1800             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1801             break;
1802          }
1803          case CAM_INTF_META_FLASH_MODE:{
1804             uint8_t *flashMode = (uint8_t*)
1805                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
1806             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
1807             break;
1808          }
1809          case CAM_INTF_META_HOTPIXEL_MODE: {
1810              uint8_t  *hotPixelMode =
1811                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1812              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1813              break;
1814          }
1815          case CAM_INTF_META_LENS_APERTURE:{
1816             float  *lensAperture =
1817                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1818             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1819             break;
1820          }
1821          case CAM_INTF_META_LENS_FILTERDENSITY: {
1822             float  *filterDensity =
1823                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1824             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1825             break;
1826          }
1827          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1828             float  *focalLength =
1829                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1830             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1831             break;
1832          }
1833          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1834             float  *focusDistance =
1835                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1836             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1837             break;
1838          }
1839          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1840             float  *focusRange =
1841                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1842             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
1843             break;
1844          }
1845          case CAM_INTF_META_LENS_STATE: {
1846             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
1847             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
1848             break;
1849          }
1850          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1851             uint8_t  *opticalStab =
1852                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1853             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1854             break;
1855          }
1856          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1857             uint8_t  *noiseRedMode =
1858                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1859             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1860             break;
1861          }
1862          case CAM_INTF_META_SCALER_CROP_REGION: {
1863             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1864             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1865             int32_t scalerCropRegion[4];
1866             scalerCropRegion[0] = hScalerCropRegion->left;
1867             scalerCropRegion[1] = hScalerCropRegion->top;
1868             scalerCropRegion[2] = hScalerCropRegion->width;
1869             scalerCropRegion[3] = hScalerCropRegion->height;
1870             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1871             break;
1872          }
1873          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1874             int64_t  *sensorExpTime =
1875                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1876             mMetadataResponse.exposure_time = *sensorExpTime;
1877             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1878             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1879             break;
1880          }
1881          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1882             int64_t  *sensorFameDuration =
1883                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1884             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1885             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1886             break;
1887          }
1888          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1889             int32_t  *sensorSensitivity =
1890                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1891             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1892             mMetadataResponse.iso_speed = *sensorSensitivity;
1893             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1894             break;
1895          }
1896          case CAM_INTF_META_SHADING_MODE: {
1897             uint8_t  *shadingMode =
1898                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1899             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1900             break;
1901          }
1902          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1903             uint8_t  *faceDetectMode =
1904                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1905             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1906                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1907                                                        *faceDetectMode);
1908             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1909             break;
1910          }
1911          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1912             uint8_t  *histogramMode =
1913                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1914             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1915             break;
1916          }
1917          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1918               uint8_t  *sharpnessMapMode =
1919                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1920               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1921                                  sharpnessMapMode, 1);
1922               break;
1923           }
1924          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1925               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1926               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1927               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1928                                  (int32_t*)sharpnessMap->sharpness,
1929                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1930               break;
1931          }
1932          case CAM_INTF_META_LENS_SHADING_MAP: {
1933               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1934               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1935               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1936               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1937               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1938                                  (float*)lensShadingMap->lens_shading,
1939                                  4*map_width*map_height);
1940               break;
1941          }
1942          case CAM_INTF_META_TONEMAP_CURVES:{
1943             //Populate CAM_INTF_META_TONEMAP_CURVES
1944             /* ch0 = G, ch 1 = B, ch 2 = R*/
1945             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1946             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1947             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1948                                (float*)tonemap->curves[0].tonemap_points,
1949                                tonemap->tonemap_points_cnt * 2);
1950
1951             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1952                                (float*)tonemap->curves[1].tonemap_points,
1953                                tonemap->tonemap_points_cnt * 2);
1954
1955             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1956                                (float*)tonemap->curves[2].tonemap_points,
1957                                tonemap->tonemap_points_cnt * 2);
1958             break;
1959          }
1960          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1961             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1962             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1963             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1964             break;
1965          }
1966          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1967              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1968              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1969              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1970                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1971              break;
1972          }
1973          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1974             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1975             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1976             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1977                       predColorCorrectionGains->gains, 4);
1978             break;
1979          }
1980          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1981             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1982                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1983             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1984                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1985             break;
1986
1987          }
1988          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
1989             uint8_t *blackLevelLock = (uint8_t*)
1990               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1991             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1992             break;
1993          }
1994          case CAM_INTF_META_SCENE_FLICKER:{
1995             uint8_t *sceneFlicker = (uint8_t*)
1996             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1997             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1998             break;
1999          }
2000          case CAM_INTF_PARM_LED_MODE:
2001             break;
2002          default:
2003             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2004                   __func__, curr_entry);
2005             break;
2006       }
2007       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2008       curr_entry = next_entry;
2009    }
2010    resultMetadata = camMetadata.release();
2011    return resultMetadata;
2012}
2013
2014/*===========================================================================
2015 * FUNCTION   : convertToRegions
2016 *
2017 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2018 *
2019 * PARAMETERS :
2020 *   @rect   : cam_rect_t struct to convert
2021 *   @region : int32_t destination array
2022 *   @weight : if we are converting from cam_area_t, weight is valid
2023 *             else weight = -1
2024 *
2025 *==========================================================================*/
2026void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2027    region[0] = rect.left;
2028    region[1] = rect.top;
2029    region[2] = rect.left + rect.width;
2030    region[3] = rect.top + rect.height;
2031    if (weight > -1) {
2032        region[4] = weight;
2033    }
2034}
2035
2036/*===========================================================================
2037 * FUNCTION   : convertFromRegions
2038 *
2039 * DESCRIPTION: helper method to convert from array to cam_rect_t
2040 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination filled from the metadata entry
 *   @settings : camera metadata buffer to read the region values from
 *   @tag      : metadata tag identifying the region entry
 *              (expects [xmin, ymin, xmax, ymax, weight])
 *
2047 *==========================================================================*/
2048void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2049                                                   const camera_metadata_t *settings,
2050                                                   uint32_t tag){
2051    CameraMetadata frame_settings;
2052    frame_settings = settings;
2053    int32_t x_min = frame_settings.find(tag).data.i32[0];
2054    int32_t y_min = frame_settings.find(tag).data.i32[1];
2055    int32_t x_max = frame_settings.find(tag).data.i32[2];
2056    int32_t y_max = frame_settings.find(tag).data.i32[3];
2057    roi->weight = frame_settings.find(tag).data.i32[4];
2058    roi->rect.left = x_min;
2059    roi->rect.top = y_min;
2060    roi->rect.width = x_max - x_min;
2061    roi->rect.height = y_max - y_min;
2062}
2063
2064/*===========================================================================
2065 * FUNCTION   : resetIfNeededROI
2066 *
2067 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2068 *              crop region
2069 *
2070 * PARAMETERS :
2071 *   @roi       : cam_area_t struct to resize
2072 *   @scalerCropRegion : cam_crop_region_t region to compare against
2073 *
2074 *
2075 *==========================================================================*/
2076bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2077                                                 const cam_crop_region_t* scalerCropRegion)
2078{
2079    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2080    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2081    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2082    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2083    if ((roi_x_max < scalerCropRegion->left) ||
2084        (roi_y_max < scalerCropRegion->top)  ||
2085        (roi->rect.left > crop_x_max) ||
2086        (roi->rect.top > crop_y_max)){
2087        return false;
2088    }
2089    if (roi->rect.left < scalerCropRegion->left) {
2090        roi->rect.left = scalerCropRegion->left;
2091    }
2092    if (roi->rect.top < scalerCropRegion->top) {
2093        roi->rect.top = scalerCropRegion->top;
2094    }
2095    if (roi_x_max > crop_x_max) {
2096        roi_x_max = crop_x_max;
2097    }
2098    if (roi_y_max > crop_y_max) {
2099        roi_y_max = crop_y_max;
2100    }
2101    roi->rect.width = roi_x_max - roi->rect.left;
2102    roi->rect.height = roi_y_max - roi->rect.top;
2103    return true;
2104}
2105
2106/*===========================================================================
2107 * FUNCTION   : convertLandmarks
2108 *
2109 * DESCRIPTION: helper method to extract the landmarks from face detection info
2110 *
 * PARAMETERS :
 *   @face      : cam_face_detection_info_t providing the landmark points
 *   @landmarks : int32_t destination array of 6 entries
 *                (x, y for left eye, right eye, mouth)
 *
2115 *
2116 *==========================================================================*/
2117void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2118{
2119    landmarks[0] = face.left_eye_center.x;
2120    landmarks[1] = face.left_eye_center.y;
2121    landmarks[2] = face.right_eye_center.x;
2122    landmarks[3] = face.right_eye_center.y;
2123    landmarks[4] = face.mouth_center.x;
2124    landmarks[5] = face.mouth_center.y;
2125}
2126
2127#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2128/*===========================================================================
2129 * FUNCTION   : initCapabilities
2130 *
2131 * DESCRIPTION: initialize camera capabilities in static data struct
2132 *
2133 * PARAMETERS :
2134 *   @cameraId  : camera Id
2135 *
2136 * RETURN     : int32_t type of status
2137 *              NO_ERROR  -- success
2138 *              none-zero failure code
2139 *==========================================================================*/
2140int QCamera3HardwareInterface::initCapabilities(int cameraId)
2141{
2142    int rc = 0;
2143    mm_camera_vtbl_t *cameraHandle = NULL;
2144    QCamera3HeapMemory *capabilityHeap = NULL;
2145
2146    cameraHandle = camera_open(cameraId);
2147    if (!cameraHandle) {
2148        ALOGE("%s: camera_open failed", __func__);
2149        rc = -1;
2150        goto open_failed;
2151    }
2152
2153    capabilityHeap = new QCamera3HeapMemory();
2154    if (capabilityHeap == NULL) {
2155        ALOGE("%s: creation of capabilityHeap failed", __func__);
2156        goto heap_creation_failed;
2157    }
2158    /* Allocate memory for capability buffer */
2159    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2160    if(rc != OK) {
2161        ALOGE("%s: No memory for cappability", __func__);
2162        goto allocate_failed;
2163    }
2164
2165    /* Map memory for capability buffer */
2166    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2167    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2168                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2169                                capabilityHeap->getFd(0),
2170                                sizeof(cam_capability_t));
2171    if(rc < 0) {
2172        ALOGE("%s: failed to map capability buffer", __func__);
2173        goto map_failed;
2174    }
2175
2176    /* Query Capability */
2177    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2178    if(rc < 0) {
2179        ALOGE("%s: failed to query capability",__func__);
2180        goto query_failed;
2181    }
2182    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2183    if (!gCamCapability[cameraId]) {
2184        ALOGE("%s: out of memory", __func__);
2185        goto query_failed;
2186    }
2187    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2188                                        sizeof(cam_capability_t));
2189    rc = 0;
2190
2191query_failed:
2192    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2193                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2194map_failed:
2195    capabilityHeap->deallocate();
2196allocate_failed:
2197    delete capabilityHeap;
2198heap_creation_failed:
2199    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2200    cameraHandle = NULL;
2201open_failed:
2202    return rc;
2203}
2204
2205/*===========================================================================
2206 * FUNCTION   : initParameters
2207 *
2208 * DESCRIPTION: initialize camera parameters
2209 *
2210 * PARAMETERS :
2211 *
2212 * RETURN     : int32_t type of status
2213 *              NO_ERROR  -- success
2214 *              none-zero failure code
2215 *==========================================================================*/
2216int QCamera3HardwareInterface::initParameters()
2217{
2218    int rc = 0;
2219
2220    //Allocate Set Param Buffer
2221    mParamHeap = new QCamera3HeapMemory();
2222    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2223    if(rc != OK) {
2224        rc = NO_MEMORY;
2225        ALOGE("Failed to allocate SETPARM Heap memory");
2226        delete mParamHeap;
2227        mParamHeap = NULL;
2228        return rc;
2229    }
2230
2231    //Map memory for parameters buffer
2232    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2233            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2234            mParamHeap->getFd(0),
2235            sizeof(parm_buffer_t));
2236    if(rc < 0) {
2237        ALOGE("%s:failed to map SETPARM buffer",__func__);
2238        rc = FAILED_TRANSACTION;
2239        mParamHeap->deallocate();
2240        delete mParamHeap;
2241        mParamHeap = NULL;
2242        return rc;
2243    }
2244
2245    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2246    return rc;
2247}
2248
2249/*===========================================================================
2250 * FUNCTION   : deinitParameters
2251 *
2252 * DESCRIPTION: de-initialize camera parameters
2253 *
2254 * PARAMETERS :
2255 *
2256 * RETURN     : NONE
2257 *==========================================================================*/
2258void QCamera3HardwareInterface::deinitParameters()
2259{
2260    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2261            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2262
2263    mParamHeap->deallocate();
2264    delete mParamHeap;
2265    mParamHeap = NULL;
2266
2267    mParameters = NULL;
2268}
2269
2270/*===========================================================================
2271 * FUNCTION   : calcMaxJpegSize
2272 *
2273 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2274 *
2275 * PARAMETERS :
2276 *
2277 * RETURN     : max_jpeg_size
2278 *==========================================================================*/
2279int QCamera3HardwareInterface::calcMaxJpegSize()
2280{
2281    int32_t max_jpeg_size = 0;
2282    int temp_width, temp_height;
2283    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2284        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2285        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2286        if (temp_width * temp_height > max_jpeg_size ) {
2287            max_jpeg_size = temp_width * temp_height;
2288        }
2289    }
2290    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2291    return max_jpeg_size;
2292}
2293
2294/*===========================================================================
2295 * FUNCTION   : initStaticMetadata
2296 *
2297 * DESCRIPTION: initialize the static metadata
2298 *
2299 * PARAMETERS :
2300 *   @cameraId  : camera Id
2301 *
2302 * RETURN     : int32_t type of status
2303 *              0  -- success
2304 *              non-zero failure code
2305 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    int rc = 0;
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1); */

    /*hard coded for now but this should come from sensor*/
    /* 10 for the back camera, 0 (fixed focus) for the front one */
    float min_focus_distance;
    if(facingBack){
        min_focus_distance = 10;
    } else {
        min_focus_distance = 0;
    }
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &min_focus_distance, 1);

    /* ---- Lens static characteristics, straight from the capability struct ---- */
    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      &gCamCapability[cameraId]->focal_length,
                      1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                      gCamCapability[cameraId]->optical_stab_modes_count);

    staticInfo.update(ANDROID_LENS_POSITION,
                      gCamCapability[cameraId]->lens_position,
                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));

    /* Map sizes are published as flattened {width, height} pairs. */
    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
            geo_correction_map_size,
            sizeof(geo_correction_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
                       gCamCapability[cameraId]->geo_correction_map,
                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));

    /* ---- Sensor static characteristics ---- */
    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);

    camera_metadata_rational baseGainFactor = {
            gCamCapability[cameraId]->base_gain_factor.numerator,
            gCamCapability[cameraId]->base_gain_factor.denominator};
    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
                      &baseGainFactor, 1);

    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                               gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, 2);

    /* Active array rectangle is {left, top, width, height}; origin is 0,0. */
    int32_t active_array_size[] = {0, 0,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    /* ---- Statistics / face-detection limits ---- */
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                                gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);


    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
                      &gCamCapability[cameraId]->raw_min_duration,
                       1);

    /* ---- Stream configurations (formats, sizes, durations, fps ranges) ---- */
    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
                                                HAL_PIXEL_FORMAT_BLOB};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    /* NOTE(review): array is sized by CAM_FORMAT_MAX but filled with
     * picture_sizes_tbl_cnt entries; MAX_SIZES_CNT (used for fps below)
     * looks like the intended bound -- confirm the two are compatible. */
    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    /* Only OFF is advertised until video stabilization is implemented. */
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /*HAL 1 and HAL 3 common*/
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    int32_t max3aRegions = 1;
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            &max3aRegions, 1);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                                        gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    /* JPEG sizes reuse the processed-size table built above. */
    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    /* Worst-case JPEG buffer: largest picture size at 3/2 bytes per pixel
     * plus the trailing blob header (same computation as calcMaxJpegSize). */
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    /* ---- Supported modes, translated from backend enums to framework enums.
     * Entries the framework does not know (NAME_NOT_FOUND) are dropped. ---- */
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    int32_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP,
                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    /* supported_indexes remembers which backend table slot each published
     * scene mode came from, for the overrides list below. */
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    /* Three override entries (AE, AWB, AF) per published scene mode. */
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                 gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                    gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    /* Flash firing levels pass through unmapped. */
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);


    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    /* NOTE(review): avail_ae_modes holds at most 5 entries, but up to 3
     * flash modes are appended after supported_ae_modes_cnt backend modes;
     * a count > 2 with flash present would overflow -- confirm the backend
     * never reports more than 2 AE modes here. */
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    /* NOTE(review): presumably {raw, processed, jpeg} stream counts per the
     * ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS tag definition -- confirm. */
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    /* Hand ownership of the assembled metadata buffer to the global table. */
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}
2665
2666/*===========================================================================
2667 * FUNCTION   : makeTable
2668 *
2669 * DESCRIPTION: make a table of sizes
2670 *
2671 * PARAMETERS :
2672 *
2673 *
2674 *==========================================================================*/
2675void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2676                                          int32_t* sizeTable)
2677{
2678    int j = 0;
2679    for (int i = 0; i < size; i++) {
2680        sizeTable[j] = dimTable[i].width;
2681        sizeTable[j+1] = dimTable[i].height;
2682        j+=2;
2683    }
2684}
2685
2686/*===========================================================================
2687 * FUNCTION   : makeFPSTable
2688 *
2689 * DESCRIPTION: make a table of fps ranges
2690 *
2691 * PARAMETERS :
2692 *
2693 *==========================================================================*/
2694void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2695                                          int32_t* fpsRangesTable)
2696{
2697    int j = 0;
2698    for (int i = 0; i < size; i++) {
2699        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2700        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2701        j+=2;
2702    }
2703}
2704
2705/*===========================================================================
2706 * FUNCTION   : makeOverridesList
2707 *
2708 * DESCRIPTION: make a list of scene mode overrides
2709 *
2710 * PARAMETERS :
2711 *
2712 *
2713 *==========================================================================*/
2714void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2715                                                  uint8_t size, uint8_t* overridesList,
2716                                                  uint8_t* supported_indexes,
2717                                                  int camera_id)
2718{
2719    /*daemon will give a list of overrides for all scene modes.
2720      However we should send the fwk only the overrides for the scene modes
2721      supported by the framework*/
2722    int j = 0, index = 0, supt = 0;
2723    uint8_t focus_override;
2724    for (int i = 0; i < size; i++) {
2725        supt = 0;
2726        index = supported_indexes[i];
2727        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2728        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2729                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2730                                                    overridesTable[index].awb_mode);
2731        focus_override = (uint8_t)overridesTable[index].af_mode;
2732        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2733           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2734              supt = 1;
2735              break;
2736           }
2737        }
2738        if (supt) {
2739           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2740                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2741                                              focus_override);
2742        } else {
2743           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2744        }
2745        j+=3;
2746    }
2747}
2748
2749/*===========================================================================
2750 * FUNCTION   : getPreviewHalPixelFormat
2751 *
2752 * DESCRIPTION: convert the format to type recognized by framework
2753 *
2754 * PARAMETERS : format : the format from backend
2755 *
2756 ** RETURN    : format recognized by framework
2757 *
2758 *==========================================================================*/
2759int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2760{
2761    int32_t halPixelFormat;
2762
2763    switch (format) {
2764    case CAM_FORMAT_YUV_420_NV12:
2765        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2766        break;
2767    case CAM_FORMAT_YUV_420_NV21:
2768        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2769        break;
2770    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2771        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2772        break;
2773    case CAM_FORMAT_YUV_420_YV12:
2774        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2775        break;
2776    case CAM_FORMAT_YUV_422_NV16:
2777    case CAM_FORMAT_YUV_422_NV61:
2778    default:
2779        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2780        break;
2781    }
2782    return halPixelFormat;
2783}
2784
2785/*===========================================================================
2786 * FUNCTION   : getSensorSensitivity
2787 *
2788 * DESCRIPTION: convert iso_mode to an integer value
2789 *
2790 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2791 *
2792 ** RETURN    : sensitivity supported by sensor
2793 *
2794 *==========================================================================*/
2795int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2796{
2797    int32_t sensitivity;
2798
2799    switch (iso_mode) {
2800    case CAM_ISO_MODE_100:
2801        sensitivity = 100;
2802        break;
2803    case CAM_ISO_MODE_200:
2804        sensitivity = 200;
2805        break;
2806    case CAM_ISO_MODE_400:
2807        sensitivity = 400;
2808        break;
2809    case CAM_ISO_MODE_800:
2810        sensitivity = 800;
2811        break;
2812    case CAM_ISO_MODE_1600:
2813        sensitivity = 1600;
2814        break;
2815    default:
2816        sensitivity = -1;
2817        break;
2818    }
2819    return sensitivity;
2820}
2821
2822
2823/*===========================================================================
2824 * FUNCTION   : AddSetParmEntryToBatch
2825 *
2826 * DESCRIPTION: add set parameter entry into batch
2827 *
2828 * PARAMETERS :
2829 *   @p_table     : ptr to parameter buffer
2830 *   @paramType   : parameter type
2831 *   @paramLength : length of parameter value
2832 *   @paramValue  : ptr to parameter value
2833 *
2834 * RETURN     : int32_t type of status
2835 *              NO_ERROR  -- success
2836 *              none-zero failure code
2837 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    /* The batch buffer keeps the set of active parameter IDs as a sorted,
     * intrusive singly-linked list threaded through the table (accessed
     * via the GET/SET_*_PARAM_ID macros); the entry slot for each ID is
     * addressed with POINTER_OF.  paramType doubles as the list key. */
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
        /* Entry is already the list head; links are unchanged. */
    } else if (position < current){
        /* New smallest ID: becomes the new head of the list. */
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    /* Guard against overrunning the fixed-size per-parameter slot. */
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    /* Re-inserting an existing ID simply overwrites its value here. */
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2879
2880/*===========================================================================
2881 * FUNCTION   : lookupFwkName
2882 *
2883 * DESCRIPTION: In case the enum is not same in fwk and backend
2884 *              make sure the parameter is correctly propogated
2885 *
2886 * PARAMETERS  :
2887 *   @arr      : map between the two enums
2888 *   @len      : len of the map
2889 *   @hal_name : name of the hal_parm to map
2890 *
2891 * RETURN     : int type of status
2892 *              fwk_name  -- success
2893 *              none-zero failure code
2894 *==========================================================================*/
2895int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2896                                             int len, int hal_name)
2897{
2898
2899    for (int i = 0; i < len; i++) {
2900        if (arr[i].hal_name == hal_name)
2901            return arr[i].fwk_name;
2902    }
2903
2904    /* Not able to find matching framework type is not necessarily
2905     * an error case. This happens when mm-camera supports more attributes
2906     * than the frameworks do */
2907    ALOGD("%s: Cannot find matching framework type", __func__);
2908    return NAME_NOT_FOUND;
2909}
2910
2911/*===========================================================================
2912 * FUNCTION   : lookupHalName
2913 *
2914 * DESCRIPTION: In case the enum is not same in fwk and backend
2915 *              make sure the parameter is correctly propogated
2916 *
2917 * PARAMETERS  :
2918 *   @arr      : map between the two enums
2919 *   @len      : len of the map
2920 *   @fwk_name : name of the hal_parm to map
2921 *
2922 * RETURN     : int32_t type of status
2923 *              hal_name  -- success
2924 *              none-zero failure code
2925 *==========================================================================*/
2926int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2927                                             int len, int fwk_name)
2928{
2929    for (int i = 0; i < len; i++) {
2930       if (arr[i].fwk_name == fwk_name)
2931           return arr[i].hal_name;
2932    }
2933    ALOGE("%s: Cannot find matching hal type", __func__);
2934    return NAME_NOT_FOUND;
2935}
2936
2937/*===========================================================================
2938 * FUNCTION   : getCapabilities
2939 *
2940 * DESCRIPTION: query camera capabilities
2941 *
2942 * PARAMETERS :
2943 *   @cameraId  : camera Id
2944 *   @info      : camera info struct to be filled in with camera capabilities
2945 *
2946 * RETURN     : int32_t type of status
2947 *              NO_ERROR  -- success
2948 *              none-zero failure code
2949 *==========================================================================*/
2950int QCamera3HardwareInterface::getCamInfo(int cameraId,
2951                                    struct camera_info *info)
2952{
2953    int rc = 0;
2954
2955    if (NULL == gCamCapability[cameraId]) {
2956        rc = initCapabilities(cameraId);
2957        if (rc < 0) {
2958            //pthread_mutex_unlock(&g_camlock);
2959            return rc;
2960        }
2961    }
2962
2963    if (NULL == gStaticMetadata[cameraId]) {
2964        rc = initStaticMetadata(cameraId);
2965        if (rc < 0) {
2966            return rc;
2967        }
2968    }
2969
2970    switch(gCamCapability[cameraId]->position) {
2971    case CAM_POSITION_BACK:
2972        info->facing = CAMERA_FACING_BACK;
2973        break;
2974
2975    case CAM_POSITION_FRONT:
2976        info->facing = CAMERA_FACING_FRONT;
2977        break;
2978
2979    default:
2980        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2981        rc = -1;
2982        break;
2983    }
2984
2985
2986    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2987    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2988    info->static_camera_characteristics = gStaticMetadata[cameraId];
2989
2990    return rc;
2991}
2992
2993/*===========================================================================
2994 * FUNCTION   : translateMetadata
2995 *
2996 * DESCRIPTION: translate the metadata into camera_metadata_t
2997 *
2998 * PARAMETERS : type of the request
2999 *
3000 *
3001 * RETURN     : success: camera_metadata_t*
3002 *              failure: NULL
3003 *
3004 *==========================================================================*/
3005camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
3006{
3007    pthread_mutex_lock(&mMutex);
3008
3009    if (mDefaultMetadata[type] != NULL) {
3010        pthread_mutex_unlock(&mMutex);
3011        return mDefaultMetadata[type];
3012    }
3013    //first time we are handling this request
3014    //fill up the metadata structure using the wrapper class
3015    CameraMetadata settings;
3016    //translate from cam_capability_t to camera_metadata_tag_t
3017    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
3018    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
3019    int32_t defaultRequestID = 0;
3020    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
3021
3022    /*control*/
3023
3024    uint8_t controlIntent = 0;
3025    switch (type) {
3026      case CAMERA3_TEMPLATE_PREVIEW:
3027        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
3028        break;
3029      case CAMERA3_TEMPLATE_STILL_CAPTURE:
3030        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
3031        break;
3032      case CAMERA3_TEMPLATE_VIDEO_RECORD:
3033        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
3034        break;
3035      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
3036        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
3037        break;
3038      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
3039        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
3040        break;
3041      default:
3042        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
3043        break;
3044    }
3045    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
3046
3047    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3048            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
3049
3050    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
3051    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3052
3053    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
3054    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
3055
3056    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
3057    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
3058
3059    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
3060    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
3061
3062    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
3063    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
3064
3065    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
3066    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
3067
3068    static uint8_t focusMode;
3069    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
3070        ALOGE("%s: Setting focus mode to auto", __func__);
3071        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
3072    } else {
3073        ALOGE("%s: Setting focus mode to off", __func__);
3074        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3075    }
3076    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
3077
3078    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
3079    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
3080
3081    /*flash*/
3082    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3083    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3084
3085    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3086    settings.update(ANDROID_FLASH_FIRING_POWER,
3087            &flashFiringLevel, 1);
3088
3089    /* lens */
3090    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3091    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3092
3093    if (gCamCapability[mCameraId]->filter_densities_count) {
3094        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3095        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3096                        gCamCapability[mCameraId]->filter_densities_count);
3097    }
3098
3099    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3100    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3101
3102    /* Exposure time(Update the Min Exposure Time)*/
3103    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
3104    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
3105
3106    /* frame duration */
3107    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
3108    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
3109
3110    /* sensitivity */
3111    static const int32_t default_sensitivity = 100;
3112    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
3113
3114    /*edge mode*/
3115    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
3116    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
3117
3118    /*noise reduction mode*/
3119    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3120    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3121
3122    /*color correction mode*/
3123    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3124    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3125
3126    /*transform matrix mode*/
3127    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3128    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3129
3130    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3131    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3132
3133    int32_t scaler_crop_region[4];
3134    scaler_crop_region[0] = 0;
3135    scaler_crop_region[1] = 0;
3136    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
3137    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
3138    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
3139
3140    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
3141    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
3142
3143    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3144    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
3145
3146    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
3147                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
3148                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
3149    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
3150
3151    mDefaultMetadata[type] = settings.release();
3152
3153    pthread_mutex_unlock(&mMutex);
3154    return mDefaultMetadata[type];
3155}
3156
3157/*===========================================================================
3158 * FUNCTION   : setFrameParameters
3159 *
3160 * DESCRIPTION: set parameters per frame as requested in the metadata from
3161 *              framework
3162 *
3163 * PARAMETERS :
3164 *   @request   : request that needs to be serviced
3165 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3166 *
3167 * RETURN     : success: NO_ERROR
3168 *              failure:
3169 *==========================================================================*/
3170int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3171                    uint32_t streamTypeMask)
3172{
3173    /*translate from camera_metadata_t type to parm_type_t*/
3174    int rc = 0;
3175    if (request->settings == NULL && mFirstRequest) {
3176        /*settings cannot be null for the first request*/
3177        return BAD_VALUE;
3178    }
3179
3180    int32_t hal_version = CAM_HAL_V3;
3181
3182    memset(mParameters, 0, sizeof(parm_buffer_t));
3183    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3184    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3185                sizeof(hal_version), &hal_version);
3186    if (rc < 0) {
3187        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3188        return BAD_VALUE;
3189    }
3190
3191    /*we need to update the frame number in the parameters*/
3192    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3193                                sizeof(request->frame_number), &(request->frame_number));
3194    if (rc < 0) {
3195        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3196        return BAD_VALUE;
3197    }
3198
3199    /* Update stream id mask where buffers are requested */
3200    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3201                                sizeof(streamTypeMask), &streamTypeMask);
3202    if (rc < 0) {
3203        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3204        return BAD_VALUE;
3205    }
3206
3207    if(request->settings != NULL){
3208        rc = translateMetadataToParameters(request);
3209    }
3210    /*set the parameters to backend*/
3211    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3212    return rc;
3213}
3214
3215/*===========================================================================
3216 * FUNCTION   : translateMetadataToParameters
3217 *
3218 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3219 *
3220 *
3221 * PARAMETERS :
3222 *   @request  : request sent from framework
3223 *
3224 *
3225 * RETURN     : success: NO_ERROR
3226 *              failure:
3227 *==========================================================================*/
3228int QCamera3HardwareInterface::translateMetadataToParameters
3229                                  (const camera3_capture_request_t *request)
3230{
3231    int rc = 0;
3232    CameraMetadata frame_settings;
3233    frame_settings = request->settings;
3234
3235    /* Do not change the order of the following list unless you know what you are
3236     * doing.
3237     * The order is laid out in such a way that parameters in the front of the table
3238     * may be used to override the parameters later in the table. Examples are:
3239     * 1. META_MODE should precede AEC/AWB/AF MODE
3240     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3241     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3242     * 4. Any mode should precede it's corresponding settings
3243     */
3244    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3245        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3246        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3247                sizeof(metaMode), &metaMode);
3248        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3249           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3250           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3251                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3252                                             fwk_sceneMode);
3253           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3254                sizeof(sceneMode), &sceneMode);
3255        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3256           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
3257           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3258                sizeof(sceneMode), &sceneMode);
3259        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3260           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
3261           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3262                sizeof(sceneMode), &sceneMode);
3263        }
3264    }
3265
3266    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3267        uint8_t fwk_aeMode =
3268            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3269        uint8_t aeMode;
3270        int32_t redeye;
3271
3272        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3273            aeMode = CAM_AE_MODE_OFF;
3274        } else {
3275            aeMode = CAM_AE_MODE_ON;
3276        }
3277        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3278            redeye = 1;
3279        } else {
3280            redeye = 0;
3281        }
3282
3283        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3284                                          sizeof(AE_FLASH_MODE_MAP),
3285                                          fwk_aeMode);
3286        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3287                sizeof(aeMode), &aeMode);
3288        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3289                sizeof(flashMode), &flashMode);
3290        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3291                sizeof(redeye), &redeye);
3292    }
3293
3294    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3295        uint8_t fwk_whiteLevel =
3296            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3297        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3298                sizeof(WHITE_BALANCE_MODES_MAP),
3299                fwk_whiteLevel);
3300        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3301                sizeof(whiteLevel), &whiteLevel);
3302    }
3303
3304    float focalDistance = -1.0;
3305    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3306        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3307        rc = AddSetParmEntryToBatch(mParameters,
3308                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3309                sizeof(focalDistance), &focalDistance);
3310    }
3311
3312    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3313        uint8_t fwk_focusMode =
3314            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3315        uint8_t focusMode;
3316        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3317            focusMode = CAM_FOCUS_MODE_INFINITY;
3318        } else{
3319         focusMode = lookupHalName(FOCUS_MODES_MAP,
3320                                   sizeof(FOCUS_MODES_MAP),
3321                                   fwk_focusMode);
3322        }
3323        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3324                sizeof(focusMode), &focusMode);
3325    }
3326
3327    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3328        int32_t antibandingMode =
3329            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3330        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3331                sizeof(antibandingMode), &antibandingMode);
3332    }
3333
3334    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3335        int32_t expCompensation = frame_settings.find(
3336            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3337        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3338            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3339        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3340            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3341        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3342          sizeof(expCompensation), &expCompensation);
3343    }
3344
3345    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3346        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3347        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3348                sizeof(aeLock), &aeLock);
3349    }
3350    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3351        cam_fps_range_t fps_range;
3352        fps_range.min_fps =
3353            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3354        fps_range.max_fps =
3355            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3356        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3357                sizeof(fps_range), &fps_range);
3358    }
3359
3360    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3361        uint8_t awbLock =
3362            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3363        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3364                sizeof(awbLock), &awbLock);
3365    }
3366
3367    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3368        uint8_t fwk_effectMode =
3369            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3370        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3371                sizeof(EFFECT_MODES_MAP),
3372                fwk_effectMode);
3373        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3374                sizeof(effectMode), &effectMode);
3375    }
3376
3377    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3378        uint8_t colorCorrectMode =
3379            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3380        rc =
3381            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3382                    sizeof(colorCorrectMode), &colorCorrectMode);
3383    }
3384
3385    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3386        cam_color_correct_gains_t colorCorrectGains;
3387        for (int i = 0; i < 4; i++) {
3388            colorCorrectGains.gains[i] =
3389                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3390        }
3391        rc =
3392            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3393                    sizeof(colorCorrectGains), &colorCorrectGains);
3394    }
3395
3396    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3397        cam_color_correct_matrix_t colorCorrectTransform;
3398        cam_rational_type_t transform_elem;
3399        int num = 0;
3400        for (int i = 0; i < 3; i++) {
3401           for (int j = 0; j < 3; j++) {
3402              transform_elem.numerator =
3403                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3404              transform_elem.denominator =
3405                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3406              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3407              num++;
3408           }
3409        }
3410        rc =
3411            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3412                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3413    }
3414
3415    cam_trigger_t aecTrigger;
3416    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3417    aecTrigger.trigger_id = -1;
3418    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3419        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3420        aecTrigger.trigger =
3421            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3422        aecTrigger.trigger_id =
3423            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3424    }
3425    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3426                                sizeof(aecTrigger), &aecTrigger);
3427
3428    /*af_trigger must come with a trigger id*/
3429    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3430        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3431        cam_trigger_t af_trigger;
3432        af_trigger.trigger =
3433            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3434        af_trigger.trigger_id =
3435            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3436        rc = AddSetParmEntryToBatch(mParameters,
3437                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3438    }
3439
3440    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3441        int32_t demosaic =
3442            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3443        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3444                sizeof(demosaic), &demosaic);
3445    }
3446
3447    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3448        cam_edge_application_t edge_application;
3449        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3450        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3451            edge_application.sharpness = 0;
3452        } else {
3453            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3454                int32_t edgeStrength =
3455                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3456                edge_application.sharpness = edgeStrength;
3457            } else {
3458                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3459            }
3460        }
3461        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3462                sizeof(edge_application), &edge_application);
3463    }
3464
3465    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3466        int32_t respectFlashMode = 1;
3467        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3468            uint8_t fwk_aeMode =
3469                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3470            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3471                respectFlashMode = 0;
3472                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3473                    __func__);
3474            }
3475        }
3476        if (respectFlashMode) {
3477            uint8_t flashMode =
3478                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3479            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3480                                          sizeof(FLASH_MODES_MAP),
3481                                          flashMode);
3482            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3483            // To check: CAM_INTF_META_FLASH_MODE usage
3484            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3485                          sizeof(flashMode), &flashMode);
3486        }
3487    }
3488
3489    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3490        uint8_t flashPower =
3491            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3492        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3493                sizeof(flashPower), &flashPower);
3494    }
3495
3496    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3497        int64_t flashFiringTime =
3498            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3499        rc = AddSetParmEntryToBatch(mParameters,
3500                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3501    }
3502
3503    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3504        uint8_t geometricMode =
3505            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3506        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3507                sizeof(geometricMode), &geometricMode);
3508    }
3509
3510    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3511        uint8_t geometricStrength =
3512            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3513        rc = AddSetParmEntryToBatch(mParameters,
3514                CAM_INTF_META_GEOMETRIC_STRENGTH,
3515                sizeof(geometricStrength), &geometricStrength);
3516    }
3517
3518    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3519        uint8_t hotPixelMode =
3520            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3521        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3522                sizeof(hotPixelMode), &hotPixelMode);
3523    }
3524
3525    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3526        float lensAperture =
3527            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3528        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3529                sizeof(lensAperture), &lensAperture);
3530    }
3531
3532    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3533        float filterDensity =
3534            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3535        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3536                sizeof(filterDensity), &filterDensity);
3537    }
3538
3539    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3540        float focalLength =
3541            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3542        rc = AddSetParmEntryToBatch(mParameters,
3543                CAM_INTF_META_LENS_FOCAL_LENGTH,
3544                sizeof(focalLength), &focalLength);
3545    }
3546
3547    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3548        uint8_t optStabMode =
3549            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3550        rc = AddSetParmEntryToBatch(mParameters,
3551                CAM_INTF_META_LENS_OPT_STAB_MODE,
3552                sizeof(optStabMode), &optStabMode);
3553    }
3554
3555    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3556        uint8_t noiseRedMode =
3557            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3558        rc = AddSetParmEntryToBatch(mParameters,
3559                CAM_INTF_META_NOISE_REDUCTION_MODE,
3560                sizeof(noiseRedMode), &noiseRedMode);
3561    }
3562
3563    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3564        uint8_t noiseRedStrength =
3565            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3566        rc = AddSetParmEntryToBatch(mParameters,
3567                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3568                sizeof(noiseRedStrength), &noiseRedStrength);
3569    }
3570
3571    cam_crop_region_t scalerCropRegion;
3572    bool scalerCropSet = false;
3573    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3574        scalerCropRegion.left =
3575            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3576        scalerCropRegion.top =
3577            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3578        scalerCropRegion.width =
3579            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3580        scalerCropRegion.height =
3581            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3582        rc = AddSetParmEntryToBatch(mParameters,
3583                CAM_INTF_META_SCALER_CROP_REGION,
3584                sizeof(scalerCropRegion), &scalerCropRegion);
3585        scalerCropSet = true;
3586    }
3587
3588    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3589        int64_t sensorExpTime =
3590            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3591        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3592        rc = AddSetParmEntryToBatch(mParameters,
3593                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3594                sizeof(sensorExpTime), &sensorExpTime);
3595    }
3596
3597    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3598        int64_t sensorFrameDuration =
3599            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3600        int64_t minFrameDuration = getMinFrameDuration(request);
3601        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3602        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3603            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3604        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3605        rc = AddSetParmEntryToBatch(mParameters,
3606                CAM_INTF_META_SENSOR_FRAME_DURATION,
3607                sizeof(sensorFrameDuration), &sensorFrameDuration);
3608    }
3609
3610    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3611        int32_t sensorSensitivity =
3612            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3613        if (sensorSensitivity <
3614                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3615            sensorSensitivity =
3616                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3617        if (sensorSensitivity >
3618                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3619            sensorSensitivity =
3620                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3621        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3622        rc = AddSetParmEntryToBatch(mParameters,
3623                CAM_INTF_META_SENSOR_SENSITIVITY,
3624                sizeof(sensorSensitivity), &sensorSensitivity);
3625    }
3626
3627    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3628        int32_t shadingMode =
3629            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3630        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3631                sizeof(shadingMode), &shadingMode);
3632    }
3633
3634    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3635        uint8_t shadingStrength =
3636            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3637        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3638                sizeof(shadingStrength), &shadingStrength);
3639    }
3640
3641    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3642        uint8_t fwk_facedetectMode =
3643            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3644        uint8_t facedetectMode =
3645            lookupHalName(FACEDETECT_MODES_MAP,
3646                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3647        rc = AddSetParmEntryToBatch(mParameters,
3648                CAM_INTF_META_STATS_FACEDETECT_MODE,
3649                sizeof(facedetectMode), &facedetectMode);
3650    }
3651
3652    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3653        uint8_t histogramMode =
3654            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3655        rc = AddSetParmEntryToBatch(mParameters,
3656                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3657                sizeof(histogramMode), &histogramMode);
3658    }
3659
3660    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3661        uint8_t sharpnessMapMode =
3662            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3663        rc = AddSetParmEntryToBatch(mParameters,
3664                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3665                sizeof(sharpnessMapMode), &sharpnessMapMode);
3666    }
3667
3668    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3669        uint8_t tonemapMode =
3670            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3671        rc = AddSetParmEntryToBatch(mParameters,
3672                CAM_INTF_META_TONEMAP_MODE,
3673                sizeof(tonemapMode), &tonemapMode);
3674    }
3675    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3676    /*All tonemap channels will have the same number of points*/
3677    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3678        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3679        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3680        cam_rgb_tonemap_curves tonemapCurves;
3681        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3682
3683        /* ch0 = G*/
3684        int point = 0;
3685        cam_tonemap_curve_t tonemapCurveGreen;
3686        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3687            for (int j = 0; j < 2; j++) {
3688               tonemapCurveGreen.tonemap_points[i][j] =
3689                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3690               point++;
3691            }
3692        }
3693        tonemapCurves.curves[0] = tonemapCurveGreen;
3694
3695        /* ch 1 = B */
3696        point = 0;
3697        cam_tonemap_curve_t tonemapCurveBlue;
3698        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3699            for (int j = 0; j < 2; j++) {
3700               tonemapCurveBlue.tonemap_points[i][j] =
3701                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3702               point++;
3703            }
3704        }
3705        tonemapCurves.curves[1] = tonemapCurveBlue;
3706
3707        /* ch 2 = R */
3708        point = 0;
3709        cam_tonemap_curve_t tonemapCurveRed;
3710        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3711            for (int j = 0; j < 2; j++) {
3712               tonemapCurveRed.tonemap_points[i][j] =
3713                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3714               point++;
3715            }
3716        }
3717        tonemapCurves.curves[2] = tonemapCurveRed;
3718
3719        rc = AddSetParmEntryToBatch(mParameters,
3720                CAM_INTF_META_TONEMAP_CURVES,
3721                sizeof(tonemapCurves), &tonemapCurves);
3722    }
3723
3724    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3725        uint8_t captureIntent =
3726            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3727        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3728                sizeof(captureIntent), &captureIntent);
3729    }
3730
3731    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3732        uint8_t blackLevelLock =
3733            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3734        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3735                sizeof(blackLevelLock), &blackLevelLock);
3736    }
3737
3738    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3739        uint8_t lensShadingMapMode =
3740            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3741        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3742                sizeof(lensShadingMapMode), &lensShadingMapMode);
3743    }
3744
3745    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3746        cam_area_t roi;
3747        bool reset = true;
3748        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3749        if (scalerCropSet) {
3750            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3751        }
3752        if (reset) {
3753            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3754                    sizeof(roi), &roi);
3755        }
3756    }
3757
3758    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3759        cam_area_t roi;
3760        bool reset = true;
3761        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3762        if (scalerCropSet) {
3763            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3764        }
3765        if (reset) {
3766            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3767                    sizeof(roi), &roi);
3768        }
3769    }
3770
3771    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3772        cam_area_t roi;
3773        bool reset = true;
3774        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3775        if (scalerCropSet) {
3776            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3777        }
3778        if (reset) {
3779            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3780                    sizeof(roi), &roi);
3781        }
3782    }
3783    return rc;
3784}
3785
3786/*===========================================================================
3787 * FUNCTION   : getJpegSettings
3788 *
3789 * DESCRIPTION: save the jpeg settings in the HAL
3790 *
3791 *
3792 * PARAMETERS :
3793 *   @settings  : frame settings information from framework
3794 *
3795 *
3796 * RETURN     : success: NO_ERROR
3797 *              failure:
3798 *==========================================================================*/
3799int QCamera3HardwareInterface::getJpegSettings
3800                                  (const camera_metadata_t *settings)
3801{
3802    if (mJpegSettings) {
3803        if (mJpegSettings->gps_timestamp) {
3804            free(mJpegSettings->gps_timestamp);
3805            mJpegSettings->gps_timestamp = NULL;
3806        }
3807        if (mJpegSettings->gps_coordinates) {
3808            for (int i = 0; i < 3; i++) {
3809                free(mJpegSettings->gps_coordinates[i]);
3810                mJpegSettings->gps_coordinates[i] = NULL;
3811            }
3812        }
3813        free(mJpegSettings);
3814        mJpegSettings = NULL;
3815    }
3816    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3817    CameraMetadata jpeg_settings;
3818    jpeg_settings = settings;
3819
3820    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3821        mJpegSettings->jpeg_orientation =
3822            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3823    } else {
3824        mJpegSettings->jpeg_orientation = 0;
3825    }
3826    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3827        mJpegSettings->jpeg_quality =
3828            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3829    } else {
3830        mJpegSettings->jpeg_quality = 85;
3831    }
3832    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3833        mJpegSettings->thumbnail_size.width =
3834            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3835        mJpegSettings->thumbnail_size.height =
3836            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3837    } else {
3838        mJpegSettings->thumbnail_size.width = 0;
3839        mJpegSettings->thumbnail_size.height = 0;
3840    }
3841    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3842        for (int i = 0; i < 3; i++) {
3843            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3844            *(mJpegSettings->gps_coordinates[i]) =
3845                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3846        }
3847    } else{
3848       for (int i = 0; i < 3; i++) {
3849            mJpegSettings->gps_coordinates[i] = NULL;
3850        }
3851    }
3852
3853    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3854        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3855        *(mJpegSettings->gps_timestamp) =
3856            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3857    } else {
3858        mJpegSettings->gps_timestamp = NULL;
3859    }
3860
3861    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3862        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3863        for (int i = 0; i < len; i++) {
3864            mJpegSettings->gps_processing_method[i] =
3865                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3866        }
3867        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3868            mJpegSettings->gps_processing_method[len] = '\0';
3869        }
3870    } else {
3871        mJpegSettings->gps_processing_method[0] = '\0';
3872    }
3873
3874    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3875        mJpegSettings->sensor_sensitivity =
3876            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3877    } else {
3878        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3879    }
3880
3881    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3882
3883    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3884        mJpegSettings->lens_focal_length =
3885            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3886    }
3887    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3888        mJpegSettings->exposure_compensation =
3889            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3890    }
3891    mJpegSettings->sharpness = 10; //default value
3892    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3893        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3894        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3895            mJpegSettings->sharpness = 0;
3896        }
3897    }
3898    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3899    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3900    mJpegSettings->is_jpeg_format = true;
3901    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3902    return 0;
3903}
3904
3905/*===========================================================================
3906 * FUNCTION   : captureResultCb
3907 *
3908 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3909 *
3910 * PARAMETERS :
3911 *   @frame  : frame information from mm-camera-interface
3912 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3913 *   @userdata: userdata
3914 *
3915 * RETURN     : NONE
3916 *==========================================================================*/
3917void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3918                camera3_stream_buffer_t *buffer,
3919                uint32_t frame_number, void *userdata)
3920{
3921    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3922    if (hw == NULL) {
3923        ALOGE("%s: Invalid hw %p", __func__, hw);
3924        return;
3925    }
3926
3927    hw->captureResultCb(metadata, buffer, frame_number);
3928    return;
3929}
3930
3931
3932/*===========================================================================
3933 * FUNCTION   : initialize
3934 *
3935 * DESCRIPTION: Pass framework callback pointers to HAL
3936 *
3937 * PARAMETERS :
3938 *
3939 *
3940 * RETURN     : Success : 0
3941 *              Failure: -ENODEV
3942 *==========================================================================*/
3943
3944int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3945                                  const camera3_callback_ops_t *callback_ops)
3946{
3947    ALOGV("%s: E", __func__);
3948    QCamera3HardwareInterface *hw =
3949        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3950    if (!hw) {
3951        ALOGE("%s: NULL camera device", __func__);
3952        return -ENODEV;
3953    }
3954
3955    int rc = hw->initialize(callback_ops);
3956    ALOGV("%s: X", __func__);
3957    return rc;
3958}
3959
3960/*===========================================================================
3961 * FUNCTION   : configure_streams
3962 *
3963 * DESCRIPTION:
3964 *
3965 * PARAMETERS :
3966 *
3967 *
3968 * RETURN     : Success: 0
3969 *              Failure: -EINVAL (if stream configuration is invalid)
3970 *                       -ENODEV (fatal error)
3971 *==========================================================================*/
3972
3973int QCamera3HardwareInterface::configure_streams(
3974        const struct camera3_device *device,
3975        camera3_stream_configuration_t *stream_list)
3976{
3977    ALOGV("%s: E", __func__);
3978    QCamera3HardwareInterface *hw =
3979        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3980    if (!hw) {
3981        ALOGE("%s: NULL camera device", __func__);
3982        return -ENODEV;
3983    }
3984    int rc = hw->configureStreams(stream_list);
3985    ALOGV("%s: X", __func__);
3986    return rc;
3987}
3988
3989/*===========================================================================
3990 * FUNCTION   : register_stream_buffers
3991 *
3992 * DESCRIPTION: Register stream buffers with the device
3993 *
3994 * PARAMETERS :
3995 *
3996 * RETURN     :
3997 *==========================================================================*/
3998int QCamera3HardwareInterface::register_stream_buffers(
3999        const struct camera3_device *device,
4000        const camera3_stream_buffer_set_t *buffer_set)
4001{
4002    ALOGV("%s: E", __func__);
4003    QCamera3HardwareInterface *hw =
4004        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4005    if (!hw) {
4006        ALOGE("%s: NULL camera device", __func__);
4007        return -ENODEV;
4008    }
4009    int rc = hw->registerStreamBuffers(buffer_set);
4010    ALOGV("%s: X", __func__);
4011    return rc;
4012}
4013
4014/*===========================================================================
4015 * FUNCTION   : construct_default_request_settings
4016 *
4017 * DESCRIPTION: Configure a settings buffer to meet the required use case
4018 *
4019 * PARAMETERS :
4020 *
4021 *
4022 * RETURN     : Success: Return valid metadata
4023 *              Failure: Return NULL
4024 *==========================================================================*/
4025const camera_metadata_t* QCamera3HardwareInterface::
4026    construct_default_request_settings(const struct camera3_device *device,
4027                                        int type)
4028{
4029
4030    ALOGV("%s: E", __func__);
4031    camera_metadata_t* fwk_metadata = NULL;
4032    QCamera3HardwareInterface *hw =
4033        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4034    if (!hw) {
4035        ALOGE("%s: NULL camera device", __func__);
4036        return NULL;
4037    }
4038
4039    fwk_metadata = hw->translateCapabilityToMetadata(type);
4040
4041    ALOGV("%s: X", __func__);
4042    return fwk_metadata;
4043}
4044
4045/*===========================================================================
4046 * FUNCTION   : process_capture_request
4047 *
4048 * DESCRIPTION:
4049 *
4050 * PARAMETERS :
4051 *
4052 *
4053 * RETURN     :
4054 *==========================================================================*/
4055int QCamera3HardwareInterface::process_capture_request(
4056                    const struct camera3_device *device,
4057                    camera3_capture_request_t *request)
4058{
4059    ALOGV("%s: E", __func__);
4060    QCamera3HardwareInterface *hw =
4061        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4062    if (!hw) {
4063        ALOGE("%s: NULL camera device", __func__);
4064        return -EINVAL;
4065    }
4066
4067    int rc = hw->processCaptureRequest(request);
4068    ALOGV("%s: X", __func__);
4069    return rc;
4070}
4071
4072/*===========================================================================
4073 * FUNCTION   : get_metadata_vendor_tag_ops
4074 *
4075 * DESCRIPTION:
4076 *
4077 * PARAMETERS :
4078 *
4079 *
4080 * RETURN     :
4081 *==========================================================================*/
4082
4083void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
4084                const struct camera3_device *device,
4085                vendor_tag_query_ops_t* ops)
4086{
4087    ALOGV("%s: E", __func__);
4088    QCamera3HardwareInterface *hw =
4089        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4090    if (!hw) {
4091        ALOGE("%s: NULL camera device", __func__);
4092        return;
4093    }
4094
4095    hw->getMetadataVendorTagOps(ops);
4096    ALOGV("%s: X", __func__);
4097    return;
4098}
4099
4100/*===========================================================================
4101 * FUNCTION   : dump
4102 *
4103 * DESCRIPTION:
4104 *
4105 * PARAMETERS :
4106 *
4107 *
4108 * RETURN     :
4109 *==========================================================================*/
4110
4111void QCamera3HardwareInterface::dump(
4112                const struct camera3_device *device, int fd)
4113{
4114    ALOGV("%s: E", __func__);
4115    QCamera3HardwareInterface *hw =
4116        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4117    if (!hw) {
4118        ALOGE("%s: NULL camera device", __func__);
4119        return;
4120    }
4121
4122    hw->dump(fd);
4123    ALOGV("%s: X", __func__);
4124    return;
4125}
4126
4127/*===========================================================================
4128 * FUNCTION   : flush
4129 *
4130 * DESCRIPTION:
4131 *
4132 * PARAMETERS :
4133 *
4134 *
4135 * RETURN     :
4136 *==========================================================================*/
4137
4138int QCamera3HardwareInterface::flush(
4139                const struct camera3_device *device)
4140{
4141    int rc;
4142    ALOGV("%s: E", __func__);
4143    QCamera3HardwareInterface *hw =
4144        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4145    if (!hw) {
4146        ALOGE("%s: NULL camera device", __func__);
4147        return -EINVAL;
4148    }
4149
4150    rc = hw->flush();
4151    ALOGV("%s: X", __func__);
4152    return rc;
4153}
4154
4155/*===========================================================================
4156 * FUNCTION   : close_camera_device
4157 *
4158 * DESCRIPTION:
4159 *
4160 * PARAMETERS :
4161 *
4162 *
4163 * RETURN     :
4164 *==========================================================================*/
4165int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4166{
4167    ALOGV("%s: E", __func__);
4168    int ret = NO_ERROR;
4169    QCamera3HardwareInterface *hw =
4170        reinterpret_cast<QCamera3HardwareInterface *>(
4171            reinterpret_cast<camera3_device_t *>(device)->priv);
4172    if (!hw) {
4173        ALOGE("NULL camera device");
4174        return BAD_VALUE;
4175    }
4176    delete hw;
4177
4178    pthread_mutex_lock(&mCameraSessionLock);
4179    mCameraSessionActive = 0;
4180    pthread_mutex_unlock(&mCameraSessionLock);
4181    ALOGV("%s: X", __func__);
4182    return ret;
4183}
4184
4185/*===========================================================================
4186 * FUNCTION   : getWaveletDenoiseProcessPlate
4187 *
4188 * DESCRIPTION: query wavelet denoise process plate
4189 *
4190 * PARAMETERS : None
4191 *
4192 * RETURN     : WNR prcocess plate vlaue
4193 *==========================================================================*/
4194cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4195{
4196    char prop[PROPERTY_VALUE_MAX];
4197    memset(prop, 0, sizeof(prop));
4198    property_get("persist.denoise.process.plates", prop, "0");
4199    int processPlate = atoi(prop);
4200    switch(processPlate) {
4201    case 0:
4202        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4203    case 1:
4204        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4205    case 2:
4206        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4207    case 3:
4208        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4209    default:
4210        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4211    }
4212}
4213
4214/*===========================================================================
4215 * FUNCTION   : needRotationReprocess
4216 *
4217 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4218 *
4219 * PARAMETERS : none
4220 *
4221 * RETURN     : true: needed
4222 *              false: no need
4223 *==========================================================================*/
4224bool QCamera3HardwareInterface::needRotationReprocess()
4225{
4226
4227    if (!mJpegSettings->is_jpeg_format) {
4228        // RAW image, no need to reprocess
4229        return false;
4230    }
4231
4232    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4233        mJpegSettings->jpeg_orientation > 0) {
4234        // current rotation is not zero, and pp has the capability to process rotation
4235        ALOGD("%s: need do reprocess for rotation", __func__);
4236        return true;
4237    }
4238
4239    return false;
4240}
4241
4242/*===========================================================================
4243 * FUNCTION   : needReprocess
4244 *
4245 * DESCRIPTION: if reprocess in needed
4246 *
4247 * PARAMETERS : none
4248 *
4249 * RETURN     : true: needed
4250 *              false: no need
4251 *==========================================================================*/
4252bool QCamera3HardwareInterface::needReprocess()
4253{
4254    if (!mJpegSettings->is_jpeg_format) {
4255        // RAW image, no need to reprocess
4256        return false;
4257    }
4258
4259    if ((mJpegSettings->min_required_pp_mask > 0) ||
4260         isWNREnabled()) {
4261        // TODO: add for ZSL HDR later
4262        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4263        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4264        return true;
4265    }
4266    return needRotationReprocess();
4267}
4268
4269/*===========================================================================
4270 * FUNCTION   : addOnlineReprocChannel
4271 *
4272 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4273 *              coming from input channel
4274 *
4275 * PARAMETERS :
4276 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4277 *
4278 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4279 *==========================================================================*/
4280QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4281              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4282{
4283    int32_t rc = NO_ERROR;
4284    QCamera3ReprocessChannel *pChannel = NULL;
4285    if (pInputChannel == NULL) {
4286        ALOGE("%s: input channel obj is NULL", __func__);
4287        return NULL;
4288    }
4289
4290    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4291            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4292    if (NULL == pChannel) {
4293        ALOGE("%s: no mem for reprocess channel", __func__);
4294        return NULL;
4295    }
4296
4297    // Capture channel, only need snapshot and postview streams start together
4298    mm_camera_channel_attr_t attr;
4299    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4300    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4301    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4302    rc = pChannel->initialize();
4303    if (rc != NO_ERROR) {
4304        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4305        delete pChannel;
4306        return NULL;
4307    }
4308
4309    // pp feature config
4310    cam_pp_feature_config_t pp_config;
4311    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4312    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4313        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4314        pp_config.sharpness = mJpegSettings->sharpness;
4315    }
4316
4317    if (isWNREnabled()) {
4318        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4319        pp_config.denoise2d.denoise_enable = 1;
4320        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4321    }
4322    if (needRotationReprocess()) {
4323        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4324        int rotation = mJpegSettings->jpeg_orientation;
4325        if (rotation == 0) {
4326            pp_config.rotation = ROTATE_0;
4327        } else if (rotation == 90) {
4328            pp_config.rotation = ROTATE_90;
4329        } else if (rotation == 180) {
4330            pp_config.rotation = ROTATE_180;
4331        } else if (rotation == 270) {
4332            pp_config.rotation = ROTATE_270;
4333        }
4334    }
4335
4336   rc = pChannel->addReprocStreamsFromSource(pp_config,
4337                                             pInputChannel,
4338                                             mMetadataChannel);
4339
4340    if (rc != NO_ERROR) {
4341        delete pChannel;
4342        return NULL;
4343    }
4344    return pChannel;
4345}
4346
4347int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
4348{
4349    return gCamCapability[mCameraId]->min_num_pp_bufs;
4350}
4351
4352bool QCamera3HardwareInterface::isWNREnabled() {
4353    return gCamCapability[mCameraId]->isWnrSupported;
4354}
4355
4356}; //end namespace qcamera
4357