QCamera3HWI.cpp revision 514dd60f8bafe2248fc4200cd691632b1a9eab0f
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
// Max of two scalars. Classic function-like macro: arguments may be
// evaluated twice, so do not pass expressions with side effects.
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand for the CPU-mapped pointer of buffer INDEX inside a
// QCamera3Memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables; presumably populated by the module layer
// before any HAL device is constructed (the constructor dereferences
// gCamCapability[cameraId] without a NULL check) -- TODO confirm.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Previously applied parameter batch (file-scope scratch buffer).
parm_buffer_t *prevSettings;
// Cached static metadata per sensor, handed back to the framework.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: only one camera session at a time is
// supported (see openCamera(hw_device_t**)).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Framework (ANDROID_*) to backend (CAM_*) enum translation tables.
// Looked up linearly; order is not significant.

// Color effects.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Auto-white-balance modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Scene modes. FACE_PRIORITY intentionally maps to backend "off":
// face handling is done elsewhere, not by a backend scene mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Autofocus modes. AF_MODE_OFF maps to FIXED (no backend "off").
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Flicker-avoidance (antibanding) modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode to flash behavior. Both OFF and ON disable flash; REDEYE is
// treated as plain auto flash (no separate backend red-eye mode here).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Direct flash control modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Face-detect modes (SIMPLE is not mapped -- only OFF and FULL).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the
// trailing 0,0 pair advertises "no thumbnail".
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
141
// camera3_device_ops_t vtable handed to the framework via
// mCameraDevice.ops. Each entry is a static trampoline that recovers the
// QCamera3HardwareInterface instance from device->priv.
// (GNU designated-initializer syntax; order must match the struct.)
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
153
154
155/*===========================================================================
156 * FUNCTION   : QCamera3HardwareInterface
157 *
158 * DESCRIPTION: constructor of QCamera3HardwareInterface
159 *
160 * PARAMETERS :
161 *   @cameraId  : camera ID
162 *
163 * RETURN     : none
164 *==========================================================================*/
165QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
166    : mCameraId(cameraId),
167      mCameraHandle(NULL),
168      mCameraOpened(false),
169      mCameraInitialized(false),
170      mCallbackOps(NULL),
171      mInputStream(NULL),
172      mMetadataChannel(NULL),
173      mPictureChannel(NULL),
174      mFirstRequest(false),
175      mParamHeap(NULL),
176      mParameters(NULL),
177      mJpegSettings(NULL),
178      mIsZslMode(false),
179      mMinProcessedFrameDuration(0),
180      mMinJpegFrameDuration(0),
181      mMinRawFrameDuration(0),
182      m_pPowerModule(NULL)
183{
184    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
185    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
186    mCameraDevice.common.close = close_camera_device;
187    mCameraDevice.ops = &mCameraOps;
188    mCameraDevice.priv = this;
189    gCamCapability[cameraId]->version = CAM_HAL_V3;
190    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
191    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
192    gCamCapability[cameraId]->min_num_pp_bufs = 3;
193
194    pthread_cond_init(&mRequestCond, NULL);
195    mPendingRequest = 0;
196    mCurrentRequestId = -1;
197    pthread_mutex_init(&mMutex, NULL);
198
199    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
200        mDefaultMetadata[i] = NULL;
201
202#ifdef HAS_MULTIMEDIA_HINTS
203    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
204        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
205    }
206#endif
207}
208
209/*===========================================================================
210 * FUNCTION   : ~QCamera3HardwareInterface
211 *
212 * DESCRIPTION: destructor of QCamera3HardwareInterface
213 *
214 * PARAMETERS : none
215 *
216 * RETURN     : none
217 *==========================================================================*/
218QCamera3HardwareInterface::~QCamera3HardwareInterface()
219{
220    ALOGV("%s: E", __func__);
221    /* We need to stop all streams before deleting any stream */
222        /*flush the metadata list*/
223    if (!mStoredMetadataList.empty()) {
224        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
225              m != mStoredMetadataList.end(); m++) {
226            mMetadataChannel->bufDone(m->meta_buf);
227            free(m->meta_buf);
228            m = mStoredMetadataList.erase(m);
229        }
230    }
231
232    // NOTE: 'camera3_stream_t *' objects are already freed at
233    //        this stage by the framework
234    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
235        it != mStreamInfo.end(); it++) {
236        QCamera3Channel *channel = (*it)->channel;
237        if (channel) {
238            channel->stop();
239        }
240    }
241
242    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
243        it != mStreamInfo.end(); it++) {
244        QCamera3Channel *channel = (*it)->channel;
245        if ((*it)->registered && (*it)->buffer_set.buffers) {
246             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
247        }
248        if (channel)
249            delete channel;
250        free (*it);
251    }
252
253    mPictureChannel = NULL;
254
255    if (mJpegSettings != NULL) {
256        free(mJpegSettings);
257        mJpegSettings = NULL;
258    }
259
260    /* Clean up all channels */
261    if (mCameraInitialized) {
262        if (mMetadataChannel) {
263            mMetadataChannel->stop();
264            delete mMetadataChannel;
265            mMetadataChannel = NULL;
266        }
267        deinitParameters();
268    }
269
270    if (mCameraOpened)
271        closeCamera();
272
273    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
274        if (mDefaultMetadata[i])
275            free_camera_metadata(mDefaultMetadata[i]);
276
277    pthread_cond_destroy(&mRequestCond);
278
279    pthread_mutex_destroy(&mMutex);
280    ALOGV("%s: X", __func__);
281}
282
283/*===========================================================================
284 * FUNCTION   : openCamera
285 *
286 * DESCRIPTION: open camera
287 *
288 * PARAMETERS :
289 *   @hw_device  : double ptr for camera device struct
290 *
291 * RETURN     : int32_t type of status
292 *              NO_ERROR  -- success
293 *              none-zero failure code
294 *==========================================================================*/
295int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
296{
297    int rc = 0;
298    pthread_mutex_lock(&mCameraSessionLock);
299    if (mCameraSessionActive) {
300        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
301        pthread_mutex_unlock(&mCameraSessionLock);
302        return -EUSERS;
303    }
304
305    if (mCameraOpened) {
306        *hw_device = NULL;
307        return PERMISSION_DENIED;
308    }
309
310    rc = openCamera();
311    if (rc == 0) {
312        *hw_device = &mCameraDevice.common;
313        mCameraSessionActive = 1;
314    } else
315        *hw_device = NULL;
316
317#ifdef HAS_MULTIMEDIA_HINTS
318    if (rc == 0) {
319        if (m_pPowerModule) {
320            if (m_pPowerModule->powerHint) {
321                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
322                        (void *)"state=1");
323            }
324        }
325    }
326#endif
327    pthread_mutex_unlock(&mCameraSessionLock);
328    return rc;
329}
330
331/*===========================================================================
332 * FUNCTION   : openCamera
333 *
334 * DESCRIPTION: open camera
335 *
336 * PARAMETERS : none
337 *
338 * RETURN     : int32_t type of status
339 *              NO_ERROR  -- success
340 *              none-zero failure code
341 *==========================================================================*/
342int QCamera3HardwareInterface::openCamera()
343{
344    if (mCameraHandle) {
345        ALOGE("Failure: Camera already opened");
346        return ALREADY_EXISTS;
347    }
348    mCameraHandle = camera_open(mCameraId);
349    if (!mCameraHandle) {
350        ALOGE("camera_open failed.");
351        return UNKNOWN_ERROR;
352    }
353
354    mCameraOpened = true;
355
356    return NO_ERROR;
357}
358
359/*===========================================================================
360 * FUNCTION   : closeCamera
361 *
362 * DESCRIPTION: close camera
363 *
364 * PARAMETERS : none
365 *
366 * RETURN     : int32_t type of status
367 *              NO_ERROR  -- success
368 *              none-zero failure code
369 *==========================================================================*/
370int QCamera3HardwareInterface::closeCamera()
371{
372    int rc = NO_ERROR;
373
374    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
375    mCameraHandle = NULL;
376    mCameraOpened = false;
377
378#ifdef HAS_MULTIMEDIA_HINTS
379    if (rc == NO_ERROR) {
380        if (m_pPowerModule) {
381            if (m_pPowerModule->powerHint) {
382                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
383                        (void *)"state=0");
384            }
385        }
386    }
387#endif
388
389    return rc;
390}
391
392/*===========================================================================
393 * FUNCTION   : initialize
394 *
395 * DESCRIPTION: Initialize frameworks callback functions
396 *
397 * PARAMETERS :
398 *   @callback_ops : callback function to frameworks
399 *
400 * RETURN     :
401 *
402 *==========================================================================*/
403int QCamera3HardwareInterface::initialize(
404        const struct camera3_callback_ops *callback_ops)
405{
406    int rc;
407
408    pthread_mutex_lock(&mMutex);
409
410    rc = initParameters();
411    if (rc < 0) {
412        ALOGE("%s: initParamters failed %d", __func__, rc);
413       goto err1;
414    }
415    mCallbackOps = callback_ops;
416
417    pthread_mutex_unlock(&mMutex);
418    mCameraInitialized = true;
419    return 0;
420
421err1:
422    pthread_mutex_unlock(&mMutex);
423    return rc;
424}
425
426/*===========================================================================
427 * FUNCTION   : configureStreams
428 *
429 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
430 *              and output streams.
431 *
432 * PARAMETERS :
433 *   @stream_list : streams to be configured
434 *
435 * RETURN     :
436 *
437 *==========================================================================*/
438int QCamera3HardwareInterface::configureStreams(
439        camera3_stream_configuration_t *streamList)
440{
441    int rc = 0;
442    mIsZslMode = false;
443
444    // Sanity check stream_list
445    if (streamList == NULL) {
446        ALOGE("%s: NULL stream configuration", __func__);
447        return BAD_VALUE;
448    }
449    if (streamList->streams == NULL) {
450        ALOGE("%s: NULL stream list", __func__);
451        return BAD_VALUE;
452    }
453
454    if (streamList->num_streams < 1) {
455        ALOGE("%s: Bad number of streams requested: %d", __func__,
456                streamList->num_streams);
457        return BAD_VALUE;
458    }
459
460    /* first invalidate all the steams in the mStreamList
461     * if they appear again, they will be validated */
462    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
463            it != mStreamInfo.end(); it++) {
464        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
465        channel->stop();
466        (*it)->status = INVALID;
467    }
468    if (mMetadataChannel) {
469        /* If content of mStreamInfo is not 0, there is metadata stream */
470        mMetadataChannel->stop();
471    }
472
473    pthread_mutex_lock(&mMutex);
474
475    camera3_stream_t *inputStream = NULL;
476    camera3_stream_t *jpegStream = NULL;
477    cam_stream_size_info_t stream_config_info;
478
479    for (size_t i = 0; i < streamList->num_streams; i++) {
480        camera3_stream_t *newStream = streamList->streams[i];
481        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
482                __func__, newStream->stream_type, newStream->format,
483                 newStream->width, newStream->height);
484        //if the stream is in the mStreamList validate it
485        bool stream_exists = false;
486        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
487                it != mStreamInfo.end(); it++) {
488            if ((*it)->stream == newStream) {
489                QCamera3Channel *channel =
490                    (QCamera3Channel*)(*it)->stream->priv;
491                stream_exists = true;
492                (*it)->status = RECONFIGURE;
493                /*delete the channel object associated with the stream because
494                  we need to reconfigure*/
495                delete channel;
496                (*it)->stream->priv = NULL;
497                (*it)->channel = NULL;
498            }
499        }
500        if (!stream_exists) {
501            //new stream
502            stream_info_t* stream_info;
503            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
504            stream_info->stream = newStream;
505            stream_info->status = VALID;
506            stream_info->registered = 0;
507            stream_info->channel = NULL;
508            mStreamInfo.push_back(stream_info);
509        }
510        if (newStream->stream_type == CAMERA3_STREAM_INPUT
511                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
512            if (inputStream != NULL) {
513                ALOGE("%s: Multiple input streams requested!", __func__);
514                pthread_mutex_unlock(&mMutex);
515                return BAD_VALUE;
516            }
517            inputStream = newStream;
518        }
519        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
520            jpegStream = newStream;
521        }
522    }
523    mInputStream = inputStream;
524
525    /*clean up invalid streams*/
526    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
527            it != mStreamInfo.end();) {
528        if(((*it)->status) == INVALID){
529            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
530            delete channel;
531            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
532            free(*it);
533            it = mStreamInfo.erase(it);
534        } else {
535            it++;
536        }
537    }
538    if (mMetadataChannel) {
539        delete mMetadataChannel;
540        mMetadataChannel = NULL;
541    }
542
543    //Create metadata channel and initialize it
544    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
545                    mCameraHandle->ops, captureResultCb,
546                    &gCamCapability[mCameraId]->padding_info, this);
547    if (mMetadataChannel == NULL) {
548        ALOGE("%s: failed to allocate metadata channel", __func__);
549        rc = -ENOMEM;
550        pthread_mutex_unlock(&mMutex);
551        return rc;
552    }
553    rc = mMetadataChannel->initialize();
554    if (rc < 0) {
555        ALOGE("%s: metadata channel initialization failed", __func__);
556        delete mMetadataChannel;
557        mMetadataChannel = NULL;
558        pthread_mutex_unlock(&mMutex);
559        return rc;
560    }
561
562    /* Allocate channel objects for the requested streams */
563    for (size_t i = 0; i < streamList->num_streams; i++) {
564        camera3_stream_t *newStream = streamList->streams[i];
565        uint32_t stream_usage = newStream->usage;
566        stream_config_info.stream_sizes[i].width = newStream->width;
567        stream_config_info.stream_sizes[i].height = newStream->height;
568        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
569            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
570            //for zsl stream the size is jpeg size
571            stream_config_info.stream_sizes[i].width = jpegStream->width;
572            stream_config_info.stream_sizes[i].height = jpegStream->height;
573            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
574        } else {
575           //for non zsl streams find out the format
576           switch (newStream->format) {
577           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
578              {
579                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
580                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
581                 } else {
582                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
583                 }
584              }
585              break;
586           case HAL_PIXEL_FORMAT_YCbCr_420_888:
587              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
588              break;
589           case HAL_PIXEL_FORMAT_BLOB:
590              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
591              break;
592           default:
593              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
594              break;
595           }
596        }
597        if (newStream->priv == NULL) {
598            //New stream, construct channel
599            switch (newStream->stream_type) {
600            case CAMERA3_STREAM_INPUT:
601                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
602                break;
603            case CAMERA3_STREAM_BIDIRECTIONAL:
604                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
605                    GRALLOC_USAGE_HW_CAMERA_WRITE;
606                break;
607            case CAMERA3_STREAM_OUTPUT:
608                /* For video encoding stream, set read/write rarely
609                 * flag so that they may be set to un-cached */
610                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
611                    newStream->usage =
612                         (GRALLOC_USAGE_SW_READ_RARELY |
613                         GRALLOC_USAGE_SW_WRITE_RARELY |
614                         GRALLOC_USAGE_HW_CAMERA_WRITE);
615                else
616                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
617                break;
618            default:
619                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
620                break;
621            }
622
623            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
624                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
625                QCamera3Channel *channel;
626                switch (newStream->format) {
627                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
628                case HAL_PIXEL_FORMAT_YCbCr_420_888:
629                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
630                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
631                        jpegStream) {
632                        uint32_t width = jpegStream->width;
633                        uint32_t height = jpegStream->height;
634                        mIsZslMode = true;
635                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
636                            mCameraHandle->ops, captureResultCb,
637                            &gCamCapability[mCameraId]->padding_info, this, newStream,
638                            width, height);
639                    } else
640                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
641                            mCameraHandle->ops, captureResultCb,
642                            &gCamCapability[mCameraId]->padding_info, this, newStream);
643                    if (channel == NULL) {
644                        ALOGE("%s: allocation of channel failed", __func__);
645                        pthread_mutex_unlock(&mMutex);
646                        return -ENOMEM;
647                    }
648
649                    newStream->priv = channel;
650                    break;
651                case HAL_PIXEL_FORMAT_BLOB:
652                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
653                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
654                            mCameraHandle->ops, captureResultCb,
655                            &gCamCapability[mCameraId]->padding_info, this, newStream);
656                    if (mPictureChannel == NULL) {
657                        ALOGE("%s: allocation of channel failed", __func__);
658                        pthread_mutex_unlock(&mMutex);
659                        return -ENOMEM;
660                    }
661                    newStream->priv = (QCamera3Channel*)mPictureChannel;
662                    break;
663
664                //TODO: Add support for app consumed format?
665                default:
666                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
667                    break;
668                }
669            }
670
671            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
672                    it != mStreamInfo.end(); it++) {
673                if ((*it)->stream == newStream) {
674                    (*it)->channel = (QCamera3Channel*) newStream->priv;
675                    break;
676                }
677            }
678        } else {
679            // Channel already exists for this stream
680            // Do nothing for now
681        }
682    }
683
684    int32_t hal_version = CAM_HAL_V3;
685    stream_config_info.num_streams = streamList->num_streams;
686
687    // settings/parameters don't carry over for new configureStreams
688    memset(mParameters, 0, sizeof(parm_buffer_t));
689
690    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
691    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
692                sizeof(hal_version), &hal_version);
693
694    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
695                sizeof(stream_config_info), &stream_config_info);
696
697    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
698
699    /*For the streams to be reconfigured we need to register the buffers
700      since the framework wont*/
701    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
702            it != mStreamInfo.end(); it++) {
703        if ((*it)->status == RECONFIGURE) {
704            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
705            /*only register buffers for streams that have already been
706              registered*/
707            if ((*it)->registered) {
708                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
709                        (*it)->buffer_set.buffers);
710                if (rc != NO_ERROR) {
711                    ALOGE("%s: Failed to register the buffers of old stream,\
712                            rc = %d", __func__, rc);
713                }
714                ALOGV("%s: channel %p has %d buffers",
715                        __func__, channel, (*it)->buffer_set.num_buffers);
716            }
717        }
718
719        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
720        if (index == NAME_NOT_FOUND) {
721            mPendingBuffersMap.add((*it)->stream, 0);
722        } else {
723            mPendingBuffersMap.editValueAt(index) = 0;
724        }
725    }
726
727    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
728    mPendingRequestsList.clear();
729
730    mPendingFrameDropList.clear();
731
732    /*flush the metadata list*/
733    if (!mStoredMetadataList.empty()) {
734        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
735              m != mStoredMetadataList.end(); m++) {
736            mMetadataChannel->bufDone(m->meta_buf);
737            free(m->meta_buf);
738            m = mStoredMetadataList.erase(m);
739        }
740    }
741
742    mFirstRequest = true;
743
744    //Get min frame duration for this streams configuration
745    deriveMinFrameDuration();
746
747    pthread_mutex_unlock(&mMutex);
748    return rc;
749}
750
751/*===========================================================================
752 * FUNCTION   : validateCaptureRequest
753 *
754 * DESCRIPTION: validate a capture request from camera service
755 *
756 * PARAMETERS :
757 *   @request : request from framework to process
758 *
759 * RETURN     :
760 *
761 *==========================================================================*/
762int QCamera3HardwareInterface::validateCaptureRequest(
763                    camera3_capture_request_t *request)
764{
765    ssize_t idx = 0;
766    const camera3_stream_buffer_t *b;
767    CameraMetadata meta;
768
769    /* Sanity check the request */
770    if (request == NULL) {
771        ALOGE("%s: NULL capture request", __func__);
772        return BAD_VALUE;
773    }
774
775    uint32_t frameNumber = request->frame_number;
776    if (request->input_buffer != NULL &&
777            request->input_buffer->stream != mInputStream) {
778        ALOGE("%s: Request %d: Input buffer not from input stream!",
779                __FUNCTION__, frameNumber);
780        return BAD_VALUE;
781    }
782    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
783        ALOGE("%s: Request %d: No output buffers provided!",
784                __FUNCTION__, frameNumber);
785        return BAD_VALUE;
786    }
787    if (request->input_buffer != NULL) {
788        b = request->input_buffer;
789        QCamera3Channel *channel =
790            static_cast<QCamera3Channel*>(b->stream->priv);
791        if (channel == NULL) {
792            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
793                    __func__, frameNumber, idx);
794            return BAD_VALUE;
795        }
796        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
797            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
798                    __func__, frameNumber, idx);
799            return BAD_VALUE;
800        }
801        if (b->release_fence != -1) {
802            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
803                    __func__, frameNumber, idx);
804            return BAD_VALUE;
805        }
806        if (b->buffer == NULL) {
807            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
808                    __func__, frameNumber, idx);
809            return BAD_VALUE;
810        }
811    }
812
813    // Validate all buffers
814    b = request->output_buffers;
815    do {
816        QCamera3Channel *channel =
817                static_cast<QCamera3Channel*>(b->stream->priv);
818        if (channel == NULL) {
819            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
820                    __func__, frameNumber, idx);
821            return BAD_VALUE;
822        }
823        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
824            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
825                    __func__, frameNumber, idx);
826            return BAD_VALUE;
827        }
828        if (b->release_fence != -1) {
829            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
830                    __func__, frameNumber, idx);
831            return BAD_VALUE;
832        }
833        if (b->buffer == NULL) {
834            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
835                    __func__, frameNumber, idx);
836            return BAD_VALUE;
837        }
838        idx++;
839        b = request->output_buffers + idx;
840    } while (idx < (ssize_t)request->num_output_buffers);
841
842    return NO_ERROR;
843}
844
845/*===========================================================================
846 * FUNCTION   : deriveMinFrameDuration
847 *
848 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
849 *              on currently configured streams.
850 *
851 * PARAMETERS : NONE
852 *
853 * RETURN     : NONE
854 *
855 *==========================================================================*/
856void QCamera3HardwareInterface::deriveMinFrameDuration()
857{
858    int32_t maxJpegDimension, maxProcessedDimension;
859
860    maxJpegDimension = 0;
861    maxProcessedDimension = 0;
862
863    // Figure out maximum jpeg, processed, and raw dimensions
864    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
865        it != mStreamInfo.end(); it++) {
866
867        // Input stream doesn't have valid stream_type
868        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
869            continue;
870
871        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
872        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
873            if (dimension > maxJpegDimension)
874                maxJpegDimension = dimension;
875        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
876            if (dimension > maxProcessedDimension)
877                maxProcessedDimension = dimension;
878        }
879    }
880
881    //Assume all jpeg dimensions are in processed dimensions.
882    if (maxJpegDimension > maxProcessedDimension)
883        maxProcessedDimension = maxJpegDimension;
884
885    //Find minimum durations for processed, jpeg, and raw
886    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
887    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
888        if (maxProcessedDimension ==
889            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
890            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
891            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
892            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
893            break;
894        }
895    }
896}
897
898/*===========================================================================
899 * FUNCTION   : getMinFrameDuration
900 *
901 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
902 *              and current request configuration.
903 *
904 * PARAMETERS : @request: requset sent by the frameworks
905 *
906 * RETURN     : min farme duration for a particular request
907 *
908 *==========================================================================*/
909int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
910{
911    bool hasJpegStream = false;
912    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
913        const camera3_stream_t *stream = request->output_buffers[i].stream;
914        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
915            hasJpegStream = true;
916    }
917
918    if (!hasJpegStream)
919        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
920    else
921        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
922}
923
924/*===========================================================================
925 * FUNCTION   : registerStreamBuffers
926 *
927 * DESCRIPTION: Register buffers for a given stream with the HAL device.
928 *
929 * PARAMETERS :
930 *   @stream_list : streams to be configured
931 *
932 * RETURN     :
933 *
934 *==========================================================================*/
935int QCamera3HardwareInterface::registerStreamBuffers(
936        const camera3_stream_buffer_set_t *buffer_set)
937{
938    int rc = 0;
939
940    pthread_mutex_lock(&mMutex);
941
942    if (buffer_set == NULL) {
943        ALOGE("%s: Invalid buffer_set parameter.", __func__);
944        pthread_mutex_unlock(&mMutex);
945        return -EINVAL;
946    }
947    if (buffer_set->stream == NULL) {
948        ALOGE("%s: Invalid stream parameter.", __func__);
949        pthread_mutex_unlock(&mMutex);
950        return -EINVAL;
951    }
952    if (buffer_set->num_buffers < 1) {
953        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
954        pthread_mutex_unlock(&mMutex);
955        return -EINVAL;
956    }
957    if (buffer_set->buffers == NULL) {
958        ALOGE("%s: Invalid buffers parameter.", __func__);
959        pthread_mutex_unlock(&mMutex);
960        return -EINVAL;
961    }
962
963    camera3_stream_t *stream = buffer_set->stream;
964    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
965
966    //set the buffer_set in the mStreamInfo array
967    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
968            it != mStreamInfo.end(); it++) {
969        if ((*it)->stream == stream) {
970            uint32_t numBuffers = buffer_set->num_buffers;
971            (*it)->buffer_set.stream = buffer_set->stream;
972            (*it)->buffer_set.num_buffers = numBuffers;
973            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
974            if ((*it)->buffer_set.buffers == NULL) {
975                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
976                pthread_mutex_unlock(&mMutex);
977                return -ENOMEM;
978            }
979            for (size_t j = 0; j < numBuffers; j++){
980                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
981            }
982            (*it)->registered = 1;
983        }
984    }
985    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
986    if (rc < 0) {
987        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
988        pthread_mutex_unlock(&mMutex);
989        return -ENODEV;
990    }
991
992    pthread_mutex_unlock(&mMutex);
993    return NO_ERROR;
994}
995
996/*===========================================================================
997 * FUNCTION   : processCaptureRequest
998 *
999 * DESCRIPTION: process a capture request from camera service
1000 *
1001 * PARAMETERS :
1002 *   @request : request from framework to process
1003 *
1004 * RETURN     :
1005 *
1006 *==========================================================================*/
1007int QCamera3HardwareInterface::processCaptureRequest(
1008                    camera3_capture_request_t *request)
1009{
1010    int rc = NO_ERROR;
1011    int32_t request_id;
1012    CameraMetadata meta;
1013    MetadataBufferInfo reproc_meta;
1014    int queueMetadata = 0;
1015
1016    pthread_mutex_lock(&mMutex);
1017
1018    rc = validateCaptureRequest(request);
1019    if (rc != NO_ERROR) {
1020        ALOGE("%s: incoming request is not valid", __func__);
1021        pthread_mutex_unlock(&mMutex);
1022        return rc;
1023    }
1024
1025    meta = request->settings;
1026
1027    // For first capture request, send capture intent, and
1028    // stream on all streams
1029    if (mFirstRequest) {
1030
1031        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1032            int32_t hal_version = CAM_HAL_V3;
1033            uint8_t captureIntent =
1034                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1035
1036            memset(mParameters, 0, sizeof(parm_buffer_t));
1037            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1038            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1039                sizeof(hal_version), &hal_version);
1040            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1041                sizeof(captureIntent), &captureIntent);
1042            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1043                mParameters);
1044        }
1045
1046        mMetadataChannel->start();
1047        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1048            it != mStreamInfo.end(); it++) {
1049            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1050            channel->start();
1051        }
1052    }
1053
1054    uint32_t frameNumber = request->frame_number;
1055    uint32_t streamTypeMask = 0;
1056
1057    if (meta.exists(ANDROID_REQUEST_ID)) {
1058        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1059        mCurrentRequestId = request_id;
1060        ALOGV("%s: Received request with id: %d",__func__, request_id);
1061    } else if (mFirstRequest || mCurrentRequestId == -1){
1062        ALOGE("%s: Unable to find request id field, \
1063                & no previous id available", __func__);
1064        return NAME_NOT_FOUND;
1065    } else {
1066        ALOGV("%s: Re-using old request id", __func__);
1067        request_id = mCurrentRequestId;
1068    }
1069
1070    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1071                                    __func__, __LINE__,
1072                                    request->num_output_buffers,
1073                                    request->input_buffer,
1074                                    frameNumber);
1075    // Acquire all request buffers first
1076    int blob_request = 0;
1077    for (size_t i = 0; i < request->num_output_buffers; i++) {
1078        const camera3_stream_buffer_t& output = request->output_buffers[i];
1079        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1080        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1081
1082        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1083        //Call function to store local copy of jpeg data for encode params.
1084            blob_request = 1;
1085            rc = getJpegSettings(request->settings);
1086            if (rc < 0) {
1087                ALOGE("%s: failed to get jpeg parameters", __func__);
1088                pthread_mutex_unlock(&mMutex);
1089                return rc;
1090            }
1091        }
1092
1093        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1094        if (rc != OK) {
1095            ALOGE("%s: fence wait failed %d", __func__, rc);
1096            pthread_mutex_unlock(&mMutex);
1097            return rc;
1098        }
1099        streamTypeMask |= channel->getStreamTypeMask();
1100    }
1101
1102    rc = setFrameParameters(request, streamTypeMask);
1103    if (rc < 0) {
1104        ALOGE("%s: fail to set frame parameters", __func__);
1105        pthread_mutex_unlock(&mMutex);
1106        return rc;
1107    }
1108
1109    /* Update pending request list and pending buffers map */
1110    PendingRequestInfo pendingRequest;
1111    pendingRequest.frame_number = frameNumber;
1112    pendingRequest.num_buffers = request->num_output_buffers;
1113    pendingRequest.request_id = request_id;
1114    pendingRequest.blob_request = blob_request;
1115    if (blob_request)
1116        pendingRequest.input_jpeg_settings = *mJpegSettings;
1117    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1118
1119    for (size_t i = 0; i < request->num_output_buffers; i++) {
1120        RequestedBufferInfo requestedBuf;
1121        requestedBuf.stream = request->output_buffers[i].stream;
1122        requestedBuf.buffer = NULL;
1123        pendingRequest.buffers.push_back(requestedBuf);
1124
1125        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1126    }
1127    mPendingRequestsList.push_back(pendingRequest);
1128
1129    // Notify metadata channel we receive a request
1130    mMetadataChannel->request(NULL, frameNumber);
1131
1132    // Call request on other streams
1133    for (size_t i = 0; i < request->num_output_buffers; i++) {
1134        const camera3_stream_buffer_t& output = request->output_buffers[i];
1135        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1136        mm_camera_buf_def_t *pInputBuffer = NULL;
1137
1138        if (channel == NULL) {
1139            ALOGE("%s: invalid channel pointer for stream", __func__);
1140            continue;
1141        }
1142
1143        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1144            QCamera3RegularChannel* inputChannel = NULL;
1145            if(request->input_buffer != NULL){
1146                //Try to get the internal format
1147                inputChannel = (QCamera3RegularChannel*)
1148                    request->input_buffer->stream->priv;
1149                if(inputChannel == NULL ){
1150                    ALOGE("%s: failed to get input channel handle", __func__);
1151                } else {
1152                    pInputBuffer =
1153                        inputChannel->getInternalFormatBuffer(
1154                                request->input_buffer->buffer);
1155                    ALOGD("%s: Input buffer dump",__func__);
1156                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1157                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1158                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1159                    ALOGD("Handle:%p", request->input_buffer->buffer);
1160                    //TODO: need to get corresponding metadata and send it to pproc
1161                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1162                         m != mStoredMetadataList.end(); m++) {
1163                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1164                            reproc_meta.meta_buf = m->meta_buf;
1165                            queueMetadata = 1;
1166                            break;
1167                        }
1168                    }
1169                }
1170            }
1171            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1172                            pInputBuffer,(QCamera3Channel*)inputChannel);
1173            if (queueMetadata) {
1174                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1175            }
1176        } else {
1177            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1178                __LINE__, output.buffer, frameNumber);
1179            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1180                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1181                     m != mStoredMetadataList.end(); m++) {
1182                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1183                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1184                            mMetadataChannel->bufDone(m->meta_buf);
1185                            free(m->meta_buf);
1186                            m = mStoredMetadataList.erase(m);
1187                            break;
1188                        }
1189                   }
1190                }
1191            }
1192            rc = channel->request(output.buffer, frameNumber);
1193        }
1194        if (rc < 0)
1195            ALOGE("%s: request failed", __func__);
1196    }
1197
1198    mFirstRequest = false;
1199    // Added a timed condition wait
1200    struct timespec ts;
1201    uint8_t isValidTimeout = 1;
1202    rc = clock_gettime(CLOCK_REALTIME, &ts);
1203    if (rc < 0) {
1204        isValidTimeout = 0;
1205        ALOGE("%s: Error reading the real time clock!!", __func__);
1206    }
1207    else {
1208        // Make timeout as 5 sec for request to be honored
1209        ts.tv_sec += 5;
1210    }
1211    //Block on conditional variable
1212    mPendingRequest = 1;
1213    while (mPendingRequest == 1) {
1214        if (!isValidTimeout) {
1215            ALOGV("%s: Blocking on conditional wait", __func__);
1216            pthread_cond_wait(&mRequestCond, &mMutex);
1217        }
1218        else {
1219            ALOGV("%s: Blocking on timed conditional wait", __func__);
1220            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1221            if (rc == ETIMEDOUT) {
1222                rc = -ENODEV;
1223                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1224                break;
1225            }
1226        }
1227        ALOGV("%s: Unblocked", __func__);
1228    }
1229
1230    pthread_mutex_unlock(&mMutex);
1231
1232    return rc;
1233}
1234
1235/*===========================================================================
1236 * FUNCTION   : getMetadataVendorTagOps
1237 *
1238 * DESCRIPTION:
1239 *
1240 * PARAMETERS :
1241 *
1242 *
1243 * RETURN     :
1244 *==========================================================================*/
1245void QCamera3HardwareInterface::getMetadataVendorTagOps(
1246                    vendor_tag_query_ops_t* /*ops*/)
1247{
1248    /* Enable locks when we eventually add Vendor Tags */
1249    /*
1250    pthread_mutex_lock(&mMutex);
1251
1252    pthread_mutex_unlock(&mMutex);
1253    */
1254    return;
1255}
1256
1257/*===========================================================================
1258 * FUNCTION   : dump
1259 *
1260 * DESCRIPTION:
1261 *
1262 * PARAMETERS :
1263 *
1264 *
1265 * RETURN     :
1266 *==========================================================================*/
1267void QCamera3HardwareInterface::dump(int /*fd*/)
1268{
1269    /*Enable lock when we implement this function*/
1270    /*
1271    pthread_mutex_lock(&mMutex);
1272
1273    pthread_mutex_unlock(&mMutex);
1274    */
1275    return;
1276}
1277
1278/*===========================================================================
1279 * FUNCTION   : flush
1280 *
1281 * DESCRIPTION:
1282 *
1283 * PARAMETERS :
1284 *
1285 *
1286 * RETURN     :
1287 *==========================================================================*/
1288int QCamera3HardwareInterface::flush()
1289{
1290    /*Enable lock when we implement this function*/
1291    /*
1292    pthread_mutex_lock(&mMutex);
1293
1294    pthread_mutex_unlock(&mMutex);
1295    */
1296    return 0;
1297}
1298
1299/*===========================================================================
1300 * FUNCTION   : captureResultCb
1301 *
1302 * DESCRIPTION: Callback handler for all capture result
1303 *              (streams, as well as metadata)
1304 *
1305 * PARAMETERS :
1306 *   @metadata : metadata information
1307 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1308 *               NULL if metadata.
1309 *
1310 * RETURN     : NONE
1311 *==========================================================================*/
1312void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1313                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1314{
1315    pthread_mutex_lock(&mMutex);
1316
1317    if (metadata_buf) {
1318        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1319        int32_t frame_number_valid = *(int32_t *)
1320            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1321        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1322            CAM_INTF_META_PENDING_REQUESTS, metadata);
1323        uint32_t frame_number = *(uint32_t *)
1324            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1325        const struct timeval *tv = (const struct timeval *)
1326            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1327        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1328            tv->tv_usec * NSEC_PER_USEC;
1329        cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
1330            POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
1331
1332        if (!frame_number_valid) {
1333            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1334            mMetadataChannel->bufDone(metadata_buf);
1335            free(metadata_buf);
1336            goto done_metadata;
1337        }
1338        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1339                frame_number, capture_time);
1340
1341        // Go through the pending requests info and send shutter/results to frameworks
1342        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1343                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1344            camera3_capture_result_t result;
1345            camera3_notify_msg_t notify_msg;
1346            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1347
1348            // Flush out all entries with less or equal frame numbers.
1349
1350            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1351            //Right now it's the same as metadata timestamp
1352
1353            //TODO: When there is metadata drop, how do we derive the timestamp of
1354            //dropped frames? For now, we fake the dropped timestamp by substracting
1355            //from the reported timestamp
1356            nsecs_t current_capture_time = capture_time -
1357                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1358
1359            // Send shutter notify to frameworks
1360            notify_msg.type = CAMERA3_MSG_SHUTTER;
1361            notify_msg.message.shutter.frame_number = i->frame_number;
1362            notify_msg.message.shutter.timestamp = current_capture_time;
1363            mCallbackOps->notify(mCallbackOps, &notify_msg);
1364            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1365                    i->frame_number, capture_time);
1366
1367            // Check whether any stream buffer corresponding to this is dropped or not
1368            // If dropped, then send the ERROR_BUFFER for the corresponding stream
1369            if (cam_frame_drop.frame_dropped) {
1370                camera3_notify_msg_t notify_msg;
1371                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1372                        j != i->buffers.end(); j++) {
1373                    QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1374                    uint32_t streamTypeMask = channel->getStreamTypeMask();
1375                    if (streamTypeMask & cam_frame_drop.stream_type_mask) {
1376                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
1377                        ALOGV("%s: Start of reporting error frame#=%d, streamMask=%d",
1378                               __func__, i->frame_number, streamTypeMask);
1379                        notify_msg.type = CAMERA3_MSG_ERROR;
1380                        notify_msg.message.error.frame_number = i->frame_number;
1381                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
1382                        notify_msg.message.error.error_stream = j->stream;
1383                        mCallbackOps->notify(mCallbackOps, &notify_msg);
1384                        ALOGV("%s: End of reporting error frame#=%d, streamMask=%d",
1385                               __func__, i->frame_number, streamTypeMask);
1386                        PendingFrameDropInfo PendingFrameDrop;
1387                        PendingFrameDrop.frame_number=i->frame_number;
1388                        PendingFrameDrop.stream_type_mask = cam_frame_drop.stream_type_mask;
1389                        // Add the Frame drop info to mPendingFrameDropList
1390                        mPendingFrameDropList.push_back(PendingFrameDrop);
1391                    }
1392                }
1393            }
1394
1395            // Send empty metadata with already filled buffers for dropped metadata
1396            // and send valid metadata with already filled buffers for current metadata
1397            if (i->frame_number < frame_number) {
1398                CameraMetadata dummyMetadata;
1399                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1400                        &current_capture_time, 1);
1401                dummyMetadata.update(ANDROID_REQUEST_ID,
1402                        &(i->request_id), 1);
1403                result.result = dummyMetadata.release();
1404            } else {
1405                result.result = translateCbMetadataToResultMetadata(metadata,
1406                        current_capture_time, i->request_id, i->blob_request,
1407                        &(i->input_jpeg_settings));
1408                if (mIsZslMode) {
1409                   int found_metadata = 0;
1410                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1411                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1412                        j != i->buffers.end(); j++) {
1413                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1414                         //check if corresp. zsl already exists in the stored metadata list
1415                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1416                               m != mStoredMetadataList.begin(); m++) {
1417                            if (m->frame_number == frame_number) {
1418                               m->meta_buf = metadata_buf;
1419                               found_metadata = 1;
1420                               break;
1421                            }
1422                         }
1423                         if (!found_metadata) {
1424                            MetadataBufferInfo store_meta_info;
1425                            store_meta_info.meta_buf = metadata_buf;
1426                            store_meta_info.frame_number = frame_number;
1427                            mStoredMetadataList.push_back(store_meta_info);
1428                            found_metadata = 1;
1429                         }
1430                      }
1431                   }
1432                   if (!found_metadata) {
1433                       if (!i->input_buffer_present && i->blob_request) {
1434                          //livesnapshot or fallback non-zsl snapshot case
1435                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1436                                j != i->buffers.end(); j++){
1437                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1438                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1439                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1440                                 break;
1441                              }
1442                         }
1443                       } else {
1444                            //return the metadata immediately
1445                            mMetadataChannel->bufDone(metadata_buf);
1446                            free(metadata_buf);
1447                       }
1448                   }
1449               } else if (!mIsZslMode && i->blob_request) {
1450                   //If it is a blob request then send the metadata to the picture channel
1451                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1452               } else {
1453                   // Return metadata buffer
1454                   mMetadataChannel->bufDone(metadata_buf);
1455                   free(metadata_buf);
1456               }
1457
1458            }
1459            if (!result.result) {
1460                ALOGE("%s: metadata is NULL", __func__);
1461            }
1462            result.frame_number = i->frame_number;
1463            result.num_output_buffers = 0;
1464            result.output_buffers = NULL;
1465            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1466                    j != i->buffers.end(); j++) {
1467                if (j->buffer) {
1468                    result.num_output_buffers++;
1469                }
1470            }
1471
1472            if (result.num_output_buffers > 0) {
1473                camera3_stream_buffer_t *result_buffers =
1474                    new camera3_stream_buffer_t[result.num_output_buffers];
1475                if (!result_buffers) {
1476                    ALOGE("%s: Fatal error: out of memory", __func__);
1477                }
1478                size_t result_buffers_idx = 0;
1479                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1480                     j != i->buffers.end(); j++) {
1481                     if (j->buffer) {
1482                         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1483                              m != mPendingFrameDropList.end(); m++) {
1484                              QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
1485                              uint32_t streamTypeMask = channel->getStreamTypeMask();
1486                              if((m->stream_type_mask & streamTypeMask) &&
1487                                  (m->frame_number==frame_number)) {
1488                                  j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1489                                  ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
1490                                        __func__, frame_number, streamTypeMask);
1491                                  m = mPendingFrameDropList.erase(m);
1492                                  break;
1493                              }
1494                         }
1495                         result_buffers[result_buffers_idx++] = *(j->buffer);
1496                         free(j->buffer);
1497                         j->buffer = NULL;
1498                         mPendingBuffersMap.editValueFor(j->stream)--;
1499                    }
1500                }
1501                result.output_buffers = result_buffers;
1502
1503                mCallbackOps->process_capture_result(mCallbackOps, &result);
1504                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1505                        __func__, result.frame_number, current_capture_time);
1506                free_camera_metadata((camera_metadata_t *)result.result);
1507                delete[] result_buffers;
1508            } else {
1509                mCallbackOps->process_capture_result(mCallbackOps, &result);
1510                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1511                        __func__, result.frame_number, current_capture_time);
1512                free_camera_metadata((camera_metadata_t *)result.result);
1513            }
1514            // erase the element from the list
1515            i = mPendingRequestsList.erase(i);
1516        }
1517
1518
1519done_metadata:
1520        bool max_buffers_dequeued = false;
1521        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1522            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1523            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1524            if (queued_buffers == stream->max_buffers) {
1525                max_buffers_dequeued = true;
1526                break;
1527            }
1528        }
1529        if (!max_buffers_dequeued && !pending_requests) {
1530            // Unblock process_capture_request
1531            mPendingRequest = 0;
1532            pthread_cond_signal(&mRequestCond);
1533        }
1534    } else {
1535        // If the frame number doesn't exist in the pending request list,
1536        // directly send the buffer to the frameworks, and update pending buffers map
1537        // Otherwise, book-keep the buffer.
1538        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1539        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1540            i++;
1541        }
1542        if (i == mPendingRequestsList.end()) {
1543            // Verify all pending requests frame_numbers are greater
1544            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1545                    j != mPendingRequestsList.end(); j++) {
1546                if (j->frame_number < frame_number) {
1547                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1548                            __func__, j->frame_number, frame_number);
1549                }
1550            }
1551            camera3_capture_result_t result;
1552            result.result = NULL;
1553            result.frame_number = frame_number;
1554            result.num_output_buffers = 1;
1555            for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1556                  m != mPendingFrameDropList.end(); m++) {
1557                QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
1558                uint32_t streamTypeMask = channel->getStreamTypeMask();
1559                if((m->stream_type_mask & streamTypeMask) &&
1560                    (m->frame_number==frame_number) ) {
1561                    buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1562                    ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
1563                            __func__, frame_number, streamTypeMask);
1564                    m = mPendingFrameDropList.erase(m);
1565                    break;
1566                }
1567            }
1568            result.output_buffers = buffer;
1569            ALOGV("%s: result frame_number = %d, buffer = %p",
1570                    __func__, frame_number, buffer);
1571            mPendingBuffersMap.editValueFor(buffer->stream)--;
1572            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1573                int found = 0;
1574                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1575                      k != mStoredMetadataList.end(); k++) {
1576                    if (k->frame_number == frame_number) {
1577                        k->zsl_buf_hdl = buffer->buffer;
1578                        found = 1;
1579                        break;
1580                    }
1581                }
1582                if (!found) {
1583                   MetadataBufferInfo meta_info;
1584                   meta_info.frame_number = frame_number;
1585                   meta_info.zsl_buf_hdl = buffer->buffer;
1586                   mStoredMetadataList.push_back(meta_info);
1587                }
1588            }
1589            mCallbackOps->process_capture_result(mCallbackOps, &result);
1590        } else {
1591            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1592                    j != i->buffers.end(); j++) {
1593                if (j->stream == buffer->stream) {
1594                    if (j->buffer != NULL) {
1595                        ALOGE("%s: Error: buffer is already set", __func__);
1596                    } else {
1597                        j->buffer = (camera3_stream_buffer_t *)malloc(
1598                                sizeof(camera3_stream_buffer_t));
1599                        *(j->buffer) = *buffer;
1600                        ALOGV("%s: cache buffer %p at result frame_number %d",
1601                                __func__, buffer, frame_number);
1602                    }
1603                }
1604            }
1605        }
1606    }
1607    pthread_mutex_unlock(&mMutex);
1608    return;
1609}
1610
1611/*===========================================================================
1612 * FUNCTION   : translateCbMetadataToResultMetadata
1613 *
1614 * DESCRIPTION:
1615 *
1616 * PARAMETERS :
1617 *   @metadata : metadata information from callback
1618 *
1619 * RETURN     : camera_metadata_t*
1620 *              metadata in a format specified by fwk
1621 *==========================================================================*/
1622camera_metadata_t*
1623QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1624                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1625                                 int32_t request_id, int32_t BlobRequest,
1626                                 jpeg_settings_t* inputjpegsettings)
1627{
1628    CameraMetadata camMetadata;
1629    camera_metadata_t* resultMetadata;
1630
1631    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1632    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1633
1634    // Update the JPEG related info
1635    if (BlobRequest) {
1636        camMetadata.update(ANDROID_JPEG_ORIENTATION, &(inputjpegsettings->jpeg_orientation), 1);
1637        camMetadata.update(ANDROID_JPEG_QUALITY, &(inputjpegsettings->jpeg_quality), 1);
1638
1639        int32_t thumbnailSizeTable[2];
1640        thumbnailSizeTable[0] = inputjpegsettings->thumbnail_size.width;
1641        thumbnailSizeTable[1] = inputjpegsettings->thumbnail_size.height;
1642        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSizeTable, 2);
1643        ALOGV("%s: Orien=%d, quality=%d wid=%d, height=%d", __func__, inputjpegsettings->jpeg_orientation,
1644               inputjpegsettings->jpeg_quality,thumbnailSizeTable[0], thumbnailSizeTable[1]);
1645
1646        if (inputjpegsettings->gps_coordinates[0]) {
1647            double gpsCoordinates[3];
1648            gpsCoordinates[0]=*(inputjpegsettings->gps_coordinates[0]);
1649            gpsCoordinates[1]=*(inputjpegsettings->gps_coordinates[1]);
1650            gpsCoordinates[2]=*(inputjpegsettings->gps_coordinates[2]);
1651            camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
1652            ALOGV("%s: gpsCoordinates[0]=%f, 1=%f 2=%f", __func__, gpsCoordinates[0],
1653                 gpsCoordinates[1],gpsCoordinates[2]);
1654        }
1655
1656        if (inputjpegsettings->gps_timestamp) {
1657            camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, inputjpegsettings->gps_timestamp, 1);
1658            ALOGV("%s: gps_timestamp=%lld", __func__, *(inputjpegsettings->gps_timestamp));
1659        }
1660
1661        String8 str(inputjpegsettings->gps_processing_method);
1662        if (strlen(mJpegSettings->gps_processing_method) > 0) {
1663            camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
1664        }
1665    }
1666    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1667    uint8_t next_entry;
1668    while (curr_entry != CAM_INTF_PARM_MAX) {
1669       switch (curr_entry) {
1670         case CAM_INTF_META_FACE_DETECTION:{
1671             cam_face_detection_data_t *faceDetectionInfo =
1672                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1673             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1674             int32_t faceIds[MAX_ROI];
1675             uint8_t faceScores[MAX_ROI];
1676             int32_t faceRectangles[MAX_ROI * 4];
1677             int32_t faceLandmarks[MAX_ROI * 6];
1678             int j = 0, k = 0;
1679             for (int i = 0; i < numFaces; i++) {
1680                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1681                 faceScores[i] = faceDetectionInfo->faces[i].score;
1682                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1683                         faceRectangles+j, -1);
1684                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1685                 j+= 4;
1686                 k+= 6;
1687             }
1688
1689             if (numFaces <= 0) {
1690                memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
1691                memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
1692                memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
1693                memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
1694             }
1695
1696             camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1697             camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1698             camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1699               faceRectangles, numFaces*4);
1700             camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1701               faceLandmarks, numFaces*6);
1702
1703            break;
1704            }
1705         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1706             uint8_t  *color_correct_mode =
1707                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1708             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1709             break;
1710          }
1711         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1712             int32_t  *ae_precapture_id =
1713                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1714             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1715             break;
1716          }
1717         case CAM_INTF_META_AEC_ROI: {
1718            cam_area_t  *hAeRegions =
1719                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1720             int32_t aeRegions[5];
1721             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1722             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1723             break;
1724          }
1725          case CAM_INTF_META_AEC_STATE:{
1726             uint8_t *ae_state =
1727                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1728             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1729             break;
1730          }
1731          case CAM_INTF_PARM_FOCUS_MODE:{
1732             uint8_t  *focusMode =
1733                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1734             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1735                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1736             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1737             break;
1738          }
1739          case CAM_INTF_META_AF_ROI:{
1740             /*af regions*/
1741             cam_area_t  *hAfRegions =
1742                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1743             int32_t afRegions[5];
1744             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1745             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1746             break;
1747          }
1748          case CAM_INTF_META_AF_STATE: {
1749             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1750             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1751             break;
1752          }
1753          case CAM_INTF_META_AF_TRIGGER_ID: {
1754             int32_t  *afTriggerId =
1755                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1756             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1757             break;
1758          }
1759          case CAM_INTF_PARM_WHITE_BALANCE: {
1760               uint8_t  *whiteBalance =
1761                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1762               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1763                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1764                   *whiteBalance);
1765               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1766               break;
1767          }
1768          case CAM_INTF_META_AWB_REGIONS: {
1769             /*awb regions*/
1770             cam_area_t  *hAwbRegions =
1771                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1772             int32_t awbRegions[5];
1773             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1774             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1775             break;
1776          }
1777          case CAM_INTF_META_AWB_STATE: {
1778             uint8_t  *whiteBalanceState =
1779                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1780             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1781             break;
1782          }
1783          case CAM_INTF_META_MODE: {
1784             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1785             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1786             break;
1787          }
1788          case CAM_INTF_META_EDGE_MODE: {
1789             cam_edge_application_t  *edgeApplication =
1790                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1791             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
1792             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
1793             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
1794             break;
1795          }
1796          case CAM_INTF_META_FLASH_POWER: {
1797             uint8_t  *flashPower =
1798                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1799             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1800             break;
1801          }
1802          case CAM_INTF_META_FLASH_FIRING_TIME: {
1803             int64_t  *flashFiringTime =
1804                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1805             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1806             break;
1807          }
1808          case CAM_INTF_META_FLASH_STATE: {
1809             uint8_t  *flashState =
1810                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1811             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1812             break;
1813          }
1814          case CAM_INTF_META_FLASH_MODE:{
1815             uint8_t *flashMode = (uint8_t*)
1816                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
1817             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
1818             break;
1819          }
1820          case CAM_INTF_META_HOTPIXEL_MODE: {
1821              uint8_t  *hotPixelMode =
1822                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1823              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1824              break;
1825          }
1826          case CAM_INTF_META_LENS_APERTURE:{
1827             float  *lensAperture =
1828                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1829             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1830             break;
1831          }
1832          case CAM_INTF_META_LENS_FILTERDENSITY: {
1833             float  *filterDensity =
1834                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1835             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1836             break;
1837          }
1838          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1839             float  *focalLength =
1840                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1841             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1842             break;
1843          }
1844          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1845             float  *focusDistance =
1846                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1847             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1848             break;
1849          }
1850          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1851             float  *focusRange =
1852                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1853             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
1854             break;
1855          }
1856          case CAM_INTF_META_LENS_STATE: {
1857             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
1858             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
1859             break;
1860          }
1861          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1862             uint8_t  *opticalStab =
1863                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1864             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1865             break;
1866          }
1867          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1868             uint8_t  *noiseRedMode =
1869                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1870             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1871             break;
1872          }
1873          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
1874             uint8_t  *noiseRedStrength =
1875                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
1876             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
1877             break;
1878          }
1879          case CAM_INTF_META_SCALER_CROP_REGION: {
1880             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1881             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1882             int32_t scalerCropRegion[4];
1883             scalerCropRegion[0] = hScalerCropRegion->left;
1884             scalerCropRegion[1] = hScalerCropRegion->top;
1885             scalerCropRegion[2] = hScalerCropRegion->width;
1886             scalerCropRegion[3] = hScalerCropRegion->height;
1887             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1888             break;
1889          }
1890          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1891             int64_t  *sensorExpTime =
1892                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1893             mMetadataResponse.exposure_time = *sensorExpTime;
1894             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1895             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1896             break;
1897          }
1898          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1899             int64_t  *sensorFameDuration =
1900                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1901             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1902             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1903             break;
1904          }
1905          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1906             int32_t  *sensorSensitivity =
1907                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1908             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1909             mMetadataResponse.iso_speed = *sensorSensitivity;
1910             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1911             break;
1912          }
1913          case CAM_INTF_META_SHADING_MODE: {
1914             uint8_t  *shadingMode =
1915                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1916             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1917             break;
1918          }
1919          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1920             uint8_t  *faceDetectMode =
1921                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1922             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1923                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1924                                                        *faceDetectMode);
1925             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1926             break;
1927          }
1928          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1929             uint8_t  *histogramMode =
1930                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1931             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1932             break;
1933          }
1934          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1935               uint8_t  *sharpnessMapMode =
1936                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1937               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1938                                  sharpnessMapMode, 1);
1939               break;
1940           }
1941          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1942               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1943               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1944               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1945                                  (int32_t*)sharpnessMap->sharpness,
1946                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1947               break;
1948          }
1949          case CAM_INTF_META_LENS_SHADING_MAP: {
1950               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1951               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1952               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1953               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1954               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1955                                  (float*)lensShadingMap->lens_shading,
1956                                  4*map_width*map_height);
1957               break;
1958          }
1959          case CAM_INTF_META_TONEMAP_CURVES:{
1960             //Populate CAM_INTF_META_TONEMAP_CURVES
1961             /* ch0 = G, ch 1 = B, ch 2 = R*/
1962             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1963             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1964             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1965                                (float*)tonemap->curves[0].tonemap_points,
1966                                tonemap->tonemap_points_cnt * 2);
1967
1968             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1969                                (float*)tonemap->curves[1].tonemap_points,
1970                                tonemap->tonemap_points_cnt * 2);
1971
1972             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1973                                (float*)tonemap->curves[2].tonemap_points,
1974                                tonemap->tonemap_points_cnt * 2);
1975             break;
1976          }
1977          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1978             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1979             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1980             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1981             break;
1982          }
1983          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1984              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1985              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1986              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1987                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1988              break;
1989          }
1990          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1991             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1992             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1993             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1994                       predColorCorrectionGains->gains, 4);
1995             break;
1996          }
1997          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1998             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1999                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2000             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2001                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2002             break;
2003
2004          }
2005          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
2006             uint8_t *blackLevelLock = (uint8_t*)
2007               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2008             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2009             break;
2010          }
2011          case CAM_INTF_META_SCENE_FLICKER:{
2012             uint8_t *sceneFlicker = (uint8_t*)
2013             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2014             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2015             break;
2016          }
2017          case CAM_INTF_PARM_LED_MODE:
2018             break;
2019          case CAM_INTF_PARM_EFFECT: {
2020             uint8_t *effectMode = (uint8_t*)
2021                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2022             uint8_t fwk_effectMode = lookupFwkName(EFFECT_MODES_MAP,
2023                                                    sizeof(EFFECT_MODES_MAP),
2024                                                    *effectMode);
2025             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2026             break;
2027          }
2028          default:
2029             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2030                   __func__, curr_entry);
2031             break;
2032       }
2033       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2034       curr_entry = next_entry;
2035    }
2036    resultMetadata = camMetadata.release();
2037    return resultMetadata;
2038}
2039
2040/*===========================================================================
2041 * FUNCTION   : convertToRegions
2042 *
2043 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2044 *
2045 * PARAMETERS :
2046 *   @rect   : cam_rect_t struct to convert
2047 *   @region : int32_t destination array
2048 *   @weight : if we are converting from cam_area_t, weight is valid
2049 *             else weight = -1
2050 *
2051 *==========================================================================*/
2052void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2053    region[0] = rect.left;
2054    region[1] = rect.top;
2055    region[2] = rect.left + rect.width;
2056    region[3] = rect.top + rect.height;
2057    if (weight > -1) {
2058        region[4] = weight;
2059    }
2060}
2061
2062/*===========================================================================
2063 * FUNCTION   : convertFromRegions
2064 *
2065 * DESCRIPTION: helper method to convert from array to cam_rect_t
2066 *
2067 * PARAMETERS :
2068 *   @rect   : cam_rect_t struct to convert
2069 *   @region : int32_t destination array
2070 *   @weight : if we are converting from cam_area_t, weight is valid
2071 *             else weight = -1
2072 *
2073 *==========================================================================*/
2074void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2075                                                   const camera_metadata_t *settings,
2076                                                   uint32_t tag){
2077    CameraMetadata frame_settings;
2078    frame_settings = settings;
2079    int32_t x_min = frame_settings.find(tag).data.i32[0];
2080    int32_t y_min = frame_settings.find(tag).data.i32[1];
2081    int32_t x_max = frame_settings.find(tag).data.i32[2];
2082    int32_t y_max = frame_settings.find(tag).data.i32[3];
2083    roi->weight = frame_settings.find(tag).data.i32[4];
2084    roi->rect.left = x_min;
2085    roi->rect.top = y_min;
2086    roi->rect.width = x_max - x_min;
2087    roi->rect.height = y_max - y_min;
2088}
2089
2090/*===========================================================================
2091 * FUNCTION   : resetIfNeededROI
2092 *
2093 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2094 *              crop region
2095 *
2096 * PARAMETERS :
2097 *   @roi       : cam_area_t struct to resize
2098 *   @scalerCropRegion : cam_crop_region_t region to compare against
2099 *
2100 *
2101 *==========================================================================*/
2102bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2103                                                 const cam_crop_region_t* scalerCropRegion)
2104{
2105    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2106    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2107    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2108    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2109    if ((roi_x_max < scalerCropRegion->left) ||
2110        (roi_y_max < scalerCropRegion->top)  ||
2111        (roi->rect.left > crop_x_max) ||
2112        (roi->rect.top > crop_y_max)){
2113        return false;
2114    }
2115    if (roi->rect.left < scalerCropRegion->left) {
2116        roi->rect.left = scalerCropRegion->left;
2117    }
2118    if (roi->rect.top < scalerCropRegion->top) {
2119        roi->rect.top = scalerCropRegion->top;
2120    }
2121    if (roi_x_max > crop_x_max) {
2122        roi_x_max = crop_x_max;
2123    }
2124    if (roi_y_max > crop_y_max) {
2125        roi_y_max = crop_y_max;
2126    }
2127    roi->rect.width = roi_x_max - roi->rect.left;
2128    roi->rect.height = roi_y_max - roi->rect.top;
2129    return true;
2130}
2131
2132/*===========================================================================
2133 * FUNCTION   : convertLandmarks
2134 *
2135 * DESCRIPTION: helper method to extract the landmarks from face detection info
2136 *
2137 * PARAMETERS :
2138 *   @face   : cam_rect_t struct to convert
2139 *   @landmarks : int32_t destination array
2140 *
2141 *
2142 *==========================================================================*/
2143void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2144{
2145    landmarks[0] = face.left_eye_center.x;
2146    landmarks[1] = face.left_eye_center.y;
2147    landmarks[2] = face.right_eye_center.x;
2148    landmarks[3] = face.right_eye_center.y;
2149    landmarks[4] = face.mouth_center.x;
2150    landmarks[5] = face.mouth_center.y;
2151}
2152
2153#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2154/*===========================================================================
2155 * FUNCTION   : initCapabilities
2156 *
2157 * DESCRIPTION: initialize camera capabilities in static data struct
2158 *
2159 * PARAMETERS :
2160 *   @cameraId  : camera Id
2161 *
2162 * RETURN     : int32_t type of status
2163 *              NO_ERROR  -- success
2164 *              none-zero failure code
2165 *==========================================================================*/
2166int QCamera3HardwareInterface::initCapabilities(int cameraId)
2167{
2168    int rc = 0;
2169    mm_camera_vtbl_t *cameraHandle = NULL;
2170    QCamera3HeapMemory *capabilityHeap = NULL;
2171
2172    cameraHandle = camera_open(cameraId);
2173    if (!cameraHandle) {
2174        ALOGE("%s: camera_open failed", __func__);
2175        rc = -1;
2176        goto open_failed;
2177    }
2178
2179    capabilityHeap = new QCamera3HeapMemory();
2180    if (capabilityHeap == NULL) {
2181        ALOGE("%s: creation of capabilityHeap failed", __func__);
2182        goto heap_creation_failed;
2183    }
2184    /* Allocate memory for capability buffer */
2185    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2186    if(rc != OK) {
2187        ALOGE("%s: No memory for cappability", __func__);
2188        goto allocate_failed;
2189    }
2190
2191    /* Map memory for capability buffer */
2192    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2193    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2194                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2195                                capabilityHeap->getFd(0),
2196                                sizeof(cam_capability_t));
2197    if(rc < 0) {
2198        ALOGE("%s: failed to map capability buffer", __func__);
2199        goto map_failed;
2200    }
2201
2202    /* Query Capability */
2203    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2204    if(rc < 0) {
2205        ALOGE("%s: failed to query capability",__func__);
2206        goto query_failed;
2207    }
2208    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2209    if (!gCamCapability[cameraId]) {
2210        ALOGE("%s: out of memory", __func__);
2211        goto query_failed;
2212    }
2213    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2214                                        sizeof(cam_capability_t));
2215    rc = 0;
2216
2217query_failed:
2218    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2219                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2220map_failed:
2221    capabilityHeap->deallocate();
2222allocate_failed:
2223    delete capabilityHeap;
2224heap_creation_failed:
2225    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2226    cameraHandle = NULL;
2227open_failed:
2228    return rc;
2229}
2230
2231/*===========================================================================
2232 * FUNCTION   : initParameters
2233 *
2234 * DESCRIPTION: initialize camera parameters
2235 *
2236 * PARAMETERS :
2237 *
2238 * RETURN     : int32_t type of status
2239 *              NO_ERROR  -- success
2240 *              none-zero failure code
2241 *==========================================================================*/
2242int QCamera3HardwareInterface::initParameters()
2243{
2244    int rc = 0;
2245
2246    //Allocate Set Param Buffer
2247    mParamHeap = new QCamera3HeapMemory();
2248    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2249    if(rc != OK) {
2250        rc = NO_MEMORY;
2251        ALOGE("Failed to allocate SETPARM Heap memory");
2252        delete mParamHeap;
2253        mParamHeap = NULL;
2254        return rc;
2255    }
2256
2257    //Map memory for parameters buffer
2258    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2259            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2260            mParamHeap->getFd(0),
2261            sizeof(parm_buffer_t));
2262    if(rc < 0) {
2263        ALOGE("%s:failed to map SETPARM buffer",__func__);
2264        rc = FAILED_TRANSACTION;
2265        mParamHeap->deallocate();
2266        delete mParamHeap;
2267        mParamHeap = NULL;
2268        return rc;
2269    }
2270
2271    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2272    return rc;
2273}
2274
2275/*===========================================================================
2276 * FUNCTION   : deinitParameters
2277 *
2278 * DESCRIPTION: de-initialize camera parameters
2279 *
2280 * PARAMETERS :
2281 *
2282 * RETURN     : NONE
2283 *==========================================================================*/
2284void QCamera3HardwareInterface::deinitParameters()
2285{
2286    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2287            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2288
2289    mParamHeap->deallocate();
2290    delete mParamHeap;
2291    mParamHeap = NULL;
2292
2293    mParameters = NULL;
2294}
2295
2296/*===========================================================================
2297 * FUNCTION   : calcMaxJpegSize
2298 *
2299 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2300 *
2301 * PARAMETERS :
2302 *
2303 * RETURN     : max_jpeg_size
2304 *==========================================================================*/
2305int QCamera3HardwareInterface::calcMaxJpegSize()
2306{
2307    int32_t max_jpeg_size = 0;
2308    int temp_width, temp_height;
2309    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2310        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2311        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2312        if (temp_width * temp_height > max_jpeg_size ) {
2313            max_jpeg_size = temp_width * temp_height;
2314        }
2315    }
2316    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2317    return max_jpeg_size;
2318}
2319
2320/*===========================================================================
2321 * FUNCTION   : initStaticMetadata
2322 *
2323 * DESCRIPTION: initialize the static metadata
2324 *
2325 * PARAMETERS :
2326 *   @cameraId  : camera Id
2327 *
2328 * RETURN     : int32_t type of status
2329 *              0  -- success
2330 *              non-zero failure code
2331 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    // Builds the immutable per-camera characteristics from the capability
    // blob queried in initCapabilities() and publishes the packed result in
    // gStaticMetadata[cameraId]. Always returns 0.
    int rc = 0;
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1); */

    /*hard coded for now but this should come from sensor*/
    float min_focus_distance;
    if(facingBack){
        min_focus_distance = 10;
    } else {
        min_focus_distance = 0;
    }
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &min_focus_distance, 1);

    /* Lens static properties reported by the capability query. */
    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      &gCamCapability[cameraId]->focal_length,
                      1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                      gCamCapability[cameraId]->optical_stab_modes_count);

    staticInfo.update(ANDROID_LENS_POSITION,
                      gCamCapability[cameraId]->lens_position,
                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));

    /* Map/grid sizes are packed as {width, height} int32 pairs. */
    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
            geo_correction_map_size,
            sizeof(geo_correction_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
                       gCamCapability[cameraId]->geo_correction_map,
                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));

    /* Sensor static properties. */
    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);

    camera_metadata_rational baseGainFactor = {
            gCamCapability[cameraId]->base_gain_factor.numerator,
            gCamCapability[cameraId]->base_gain_factor.denominator};
    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
                      &baseGainFactor, 1);

    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                               gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, 2);

    /* Active array is reported as {left, top, width, height}; the origin is
       hard-coded to (0,0) here. */
    int32_t active_array_size[] = {0, 0,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    /* Statistics (face detect, histogram, sharpness map) limits. */
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                                gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);


    /* Scaler: supported output formats, sizes and minimum frame durations. */
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
                      &gCamCapability[cameraId]->raw_min_duration,
                       1);

    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
                                                HAL_PIXEL_FORMAT_BLOB};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    /* Flatten the picture-size table to w,h pairs; reused below for the
       JPEG sizes tag. */
    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    /* AE target FPS ranges, flattened to min,max pairs. */
    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /*HAL 1 and HAL 3 common*/
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    int32_t max3aRegions = 1;
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            &max3aRegions, 1);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                                        gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    /* JPEG sizes mirror the processed sizes table built above. */
    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    /* NOTE(review): this duplicates calcMaxJpegSize(); it cannot call that
       member here (this path uses the cameraId parameter, not mCameraId),
       but the two must be kept in sync. */
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    /* Translate each backend-supported mode list into framework enum values,
       dropping entries lookupFwkName cannot map. */
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    int32_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP,
                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    /* supported_indexes remembers which backend table slot each exported
       scene mode came from, for makeOverridesList below. */
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    /* Three override values (AE, AWB, AF) per exported scene mode. */
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                 gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        /* NOTE(review): val is int8_t here (int elsewhere); lookupFwkName's
           result is truncated — verify framework AWB enums fit in int8_t. */
        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                    gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);


    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    /* NOTE(review): fixed-size 5; overflows if supported_ae_modes_cnt > 2
       and flash is available (cnt + 3 entries) — confirm the backend bound. */
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    /* {raw, processed (non-stalling), jpeg (stalling)} stream count limits. */
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    /* Hand ownership of the packed metadata buffer to the global table. */
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}
2691
2692/*===========================================================================
2693 * FUNCTION   : makeTable
2694 *
2695 * DESCRIPTION: make a table of sizes
2696 *
2697 * PARAMETERS :
2698 *
2699 *
2700 *==========================================================================*/
2701void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2702                                          int32_t* sizeTable)
2703{
2704    int j = 0;
2705    for (int i = 0; i < size; i++) {
2706        sizeTable[j] = dimTable[i].width;
2707        sizeTable[j+1] = dimTable[i].height;
2708        j+=2;
2709    }
2710}
2711
2712/*===========================================================================
2713 * FUNCTION   : makeFPSTable
2714 *
2715 * DESCRIPTION: make a table of fps ranges
2716 *
2717 * PARAMETERS :
2718 *
2719 *==========================================================================*/
2720void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2721                                          int32_t* fpsRangesTable)
2722{
2723    int j = 0;
2724    for (int i = 0; i < size; i++) {
2725        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2726        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2727        j+=2;
2728    }
2729}
2730
2731/*===========================================================================
2732 * FUNCTION   : makeOverridesList
2733 *
2734 * DESCRIPTION: make a list of scene mode overrides
2735 *
2736 * PARAMETERS :
2737 *
2738 *
2739 *==========================================================================*/
2740void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2741                                                  uint8_t size, uint8_t* overridesList,
2742                                                  uint8_t* supported_indexes,
2743                                                  int camera_id)
2744{
2745    /*daemon will give a list of overrides for all scene modes.
2746      However we should send the fwk only the overrides for the scene modes
2747      supported by the framework*/
2748    int j = 0, index = 0, supt = 0;
2749    uint8_t focus_override;
2750    for (int i = 0; i < size; i++) {
2751        supt = 0;
2752        index = supported_indexes[i];
2753        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2754        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2755                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2756                                                    overridesTable[index].awb_mode);
2757        focus_override = (uint8_t)overridesTable[index].af_mode;
2758        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2759           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2760              supt = 1;
2761              break;
2762           }
2763        }
2764        if (supt) {
2765           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2766                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2767                                              focus_override);
2768        } else {
2769           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2770        }
2771        j+=3;
2772    }
2773}
2774
2775/*===========================================================================
 * FUNCTION   : getScalarFormat
2777 *
2778 * DESCRIPTION: convert the format to type recognized by framework
2779 *
2780 * PARAMETERS : format : the format from backend
2781 *
2782 ** RETURN    : format recognized by framework
2783 *
2784 *==========================================================================*/
2785int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2786{
2787    int32_t halPixelFormat;
2788
2789    switch (format) {
2790    case CAM_FORMAT_YUV_420_NV12:
2791        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2792        break;
2793    case CAM_FORMAT_YUV_420_NV21:
2794        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2795        break;
2796    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2797        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2798        break;
2799    case CAM_FORMAT_YUV_420_YV12:
2800        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2801        break;
2802    case CAM_FORMAT_YUV_422_NV16:
2803    case CAM_FORMAT_YUV_422_NV61:
2804    default:
2805        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2806        break;
2807    }
2808    return halPixelFormat;
2809}
2810
2811/*===========================================================================
2812 * FUNCTION   : getSensorSensitivity
2813 *
2814 * DESCRIPTION: convert iso_mode to an integer value
2815 *
2816 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2817 *
2818 ** RETURN    : sensitivity supported by sensor
2819 *
2820 *==========================================================================*/
2821int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2822{
2823    int32_t sensitivity;
2824
2825    switch (iso_mode) {
2826    case CAM_ISO_MODE_100:
2827        sensitivity = 100;
2828        break;
2829    case CAM_ISO_MODE_200:
2830        sensitivity = 200;
2831        break;
2832    case CAM_ISO_MODE_400:
2833        sensitivity = 400;
2834        break;
2835    case CAM_ISO_MODE_800:
2836        sensitivity = 800;
2837        break;
2838    case CAM_ISO_MODE_1600:
2839        sensitivity = 1600;
2840        break;
2841    default:
2842        sensitivity = -1;
2843        break;
2844    }
2845    return sensitivity;
2846}
2847
2848
2849/*===========================================================================
2850 * FUNCTION   : AddSetParmEntryToBatch
2851 *
2852 * DESCRIPTION: add set parameter entry into batch
2853 *
2854 * PARAMETERS :
2855 *   @p_table     : ptr to parameter buffer
2856 *   @paramType   : parameter type
2857 *   @paramLength : length of parameter value
2858 *   @paramValue  : ptr to parameter value
2859 *
2860 * RETURN     : int32_t type of status
2861 *              NO_ERROR  -- success
2862 *              none-zero failure code
2863 *==========================================================================*/
2864int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2865                                                          cam_intf_parm_type_t paramType,
2866                                                          uint32_t paramLength,
2867                                                          void *paramValue)
2868{
2869    int position = paramType;
2870    int current, next;
2871
2872    /*************************************************************************
2873    *                 Code to take care of linking next flags                *
2874    *************************************************************************/
2875    current = GET_FIRST_PARAM_ID(p_table);
2876    if (position == current){
2877        //DO NOTHING
2878    } else if (position < current){
2879        SET_NEXT_PARAM_ID(position, p_table, current);
2880        SET_FIRST_PARAM_ID(p_table, position);
2881    } else {
2882        /* Search for the position in the linked list where we need to slot in*/
2883        while (position > GET_NEXT_PARAM_ID(current, p_table))
2884            current = GET_NEXT_PARAM_ID(current, p_table);
2885
2886        /*If node already exists no need to alter linking*/
2887        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2888            next = GET_NEXT_PARAM_ID(current, p_table);
2889            SET_NEXT_PARAM_ID(current, p_table, position);
2890            SET_NEXT_PARAM_ID(position, p_table, next);
2891        }
2892    }
2893
2894    /*************************************************************************
2895    *                   Copy contents into entry                             *
2896    *************************************************************************/
2897
2898    if (paramLength > sizeof(parm_type_t)) {
2899        ALOGE("%s:Size of input larger than max entry size",__func__);
2900        return BAD_VALUE;
2901    }
2902    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2903    return NO_ERROR;
2904}
2905
/*===========================================================================
 * FUNCTION   : lookupFwkName
 *
 * DESCRIPTION: In case the enum is not the same in the framework and the
 *              backend, make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : length of the map
 *   @hal_name : HAL enum value to map
 *
 * RETURN     : fwk_name       -- success
 *              NAME_NOT_FOUND -- no matching entry in the map
 *==========================================================================*/
2921int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2922                                             int len, int hal_name)
2923{
2924
2925    for (int i = 0; i < len; i++) {
2926        if (arr[i].hal_name == hal_name)
2927            return arr[i].fwk_name;
2928    }
2929
2930    /* Not able to find matching framework type is not necessarily
2931     * an error case. This happens when mm-camera supports more attributes
2932     * than the frameworks do */
2933    ALOGD("%s: Cannot find matching framework type", __func__);
2934    return NAME_NOT_FOUND;
2935}
2936
/*===========================================================================
 * FUNCTION   : lookupHalName
 *
 * DESCRIPTION: In case the enum is not the same in the framework and the
 *              backend, make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : length of the map
 *   @fwk_name : framework enum value to map
 *
 * RETURN     : hal_name       -- success
 *              NAME_NOT_FOUND -- no matching entry in the map
 *==========================================================================*/
2952int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2953                                             int len, int fwk_name)
2954{
2955    for (int i = 0; i < len; i++) {
2956       if (arr[i].fwk_name == fwk_name)
2957           return arr[i].hal_name;
2958    }
2959    ALOGE("%s: Cannot find matching hal type", __func__);
2960    return NAME_NOT_FOUND;
2961}
2962
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int type of status
 *              0  -- success
 *              non-zero failure code
 *==========================================================================*/
2976int QCamera3HardwareInterface::getCamInfo(int cameraId,
2977                                    struct camera_info *info)
2978{
2979    int rc = 0;
2980
2981    if (NULL == gCamCapability[cameraId]) {
2982        rc = initCapabilities(cameraId);
2983        if (rc < 0) {
2984            //pthread_mutex_unlock(&g_camlock);
2985            return rc;
2986        }
2987    }
2988
2989    if (NULL == gStaticMetadata[cameraId]) {
2990        rc = initStaticMetadata(cameraId);
2991        if (rc < 0) {
2992            return rc;
2993        }
2994    }
2995
2996    switch(gCamCapability[cameraId]->position) {
2997    case CAM_POSITION_BACK:
2998        info->facing = CAMERA_FACING_BACK;
2999        break;
3000
3001    case CAM_POSITION_FRONT:
3002        info->facing = CAMERA_FACING_FRONT;
3003        break;
3004
3005    default:
3006        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
3007        rc = -1;
3008        break;
3009    }
3010
3011
3012    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
3013    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
3014    info->static_camera_characteristics = gStaticMetadata[cameraId];
3015
3016    return rc;
3017}
3018
/*===========================================================================
 * FUNCTION   : translateCapabilityToMetadata
 *
 * DESCRIPTION: translate the capability into a default camera_metadata_t
 *              request template
 *
 * PARAMETERS : type of the request template
 *
 * RETURN     : success: camera_metadata_t*
 *              failure: NULL
 *==========================================================================*/
3031camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
3032{
3033    pthread_mutex_lock(&mMutex);
3034
3035    if (mDefaultMetadata[type] != NULL) {
3036        pthread_mutex_unlock(&mMutex);
3037        return mDefaultMetadata[type];
3038    }
3039    //first time we are handling this request
3040    //fill up the metadata structure using the wrapper class
3041    CameraMetadata settings;
3042    //translate from cam_capability_t to camera_metadata_tag_t
3043    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
3044    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
3045    int32_t defaultRequestID = 0;
3046    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
3047
3048    /*control*/
3049
3050    uint8_t controlIntent = 0;
3051    switch (type) {
3052      case CAMERA3_TEMPLATE_PREVIEW:
3053        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
3054        break;
3055      case CAMERA3_TEMPLATE_STILL_CAPTURE:
3056        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
3057        break;
3058      case CAMERA3_TEMPLATE_VIDEO_RECORD:
3059        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
3060        break;
3061      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
3062        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
3063        break;
3064      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
3065        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
3066        break;
3067      default:
3068        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
3069        break;
3070    }
3071    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
3072
3073    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3074            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
3075
3076    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
3077    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3078
3079    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
3080    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
3081
3082    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
3083    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
3084
3085    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
3086    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
3087
3088    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
3089    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
3090
3091    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
3092    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
3093
3094    static uint8_t focusMode;
3095    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
3096        ALOGE("%s: Setting focus mode to auto", __func__);
3097        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
3098    } else {
3099        ALOGE("%s: Setting focus mode to off", __func__);
3100        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3101    }
3102    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
3103
3104    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
3105    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
3106
3107    /*flash*/
3108    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3109    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3110
3111    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3112    settings.update(ANDROID_FLASH_FIRING_POWER,
3113            &flashFiringLevel, 1);
3114
3115    /* lens */
3116    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3117    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3118
3119    if (gCamCapability[mCameraId]->filter_densities_count) {
3120        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3121        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3122                        gCamCapability[mCameraId]->filter_densities_count);
3123    }
3124
3125    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3126    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3127
3128    /* Exposure time(Update the Min Exposure Time)*/
3129    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
3130    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
3131
3132    /* frame duration */
3133    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
3134    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
3135
3136    /* sensitivity */
3137    static const int32_t default_sensitivity = 100;
3138    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
3139
3140    /*edge mode*/
3141    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
3142    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
3143
3144    /*noise reduction mode*/
3145    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3146    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3147
3148    /*color correction mode*/
3149    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3150    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3151
3152    /*transform matrix mode*/
3153    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3154    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3155
3156    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3157    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3158
3159    int32_t scaler_crop_region[4];
3160    scaler_crop_region[0] = 0;
3161    scaler_crop_region[1] = 0;
3162    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
3163    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
3164    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
3165
3166    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
3167    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
3168
3169    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3170    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
3171
3172    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
3173                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
3174                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
3175    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
3176
3177    mDefaultMetadata[type] = settings.release();
3178
3179    pthread_mutex_unlock(&mMutex);
3180    return mDefaultMetadata[type];
3181}
3182
3183/*===========================================================================
3184 * FUNCTION   : setFrameParameters
3185 *
3186 * DESCRIPTION: set parameters per frame as requested in the metadata from
3187 *              framework
3188 *
3189 * PARAMETERS :
3190 *   @request   : request that needs to be serviced
3191 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3192 *
3193 * RETURN     : success: NO_ERROR
3194 *              failure:
3195 *==========================================================================*/
3196int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3197                    uint32_t streamTypeMask)
3198{
3199    /*translate from camera_metadata_t type to parm_type_t*/
3200    int rc = 0;
3201    if (request->settings == NULL && mFirstRequest) {
3202        /*settings cannot be null for the first request*/
3203        return BAD_VALUE;
3204    }
3205
3206    int32_t hal_version = CAM_HAL_V3;
3207
3208    memset(mParameters, 0, sizeof(parm_buffer_t));
3209    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3210    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3211                sizeof(hal_version), &hal_version);
3212    if (rc < 0) {
3213        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3214        return BAD_VALUE;
3215    }
3216
3217    /*we need to update the frame number in the parameters*/
3218    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3219                                sizeof(request->frame_number), &(request->frame_number));
3220    if (rc < 0) {
3221        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3222        return BAD_VALUE;
3223    }
3224
3225    /* Update stream id mask where buffers are requested */
3226    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3227                                sizeof(streamTypeMask), &streamTypeMask);
3228    if (rc < 0) {
3229        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3230        return BAD_VALUE;
3231    }
3232
3233    if(request->settings != NULL){
3234        rc = translateMetadataToParameters(request);
3235    }
3236    /*set the parameters to backend*/
3237    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3238    return rc;
3239}
3240
3241/*===========================================================================
3242 * FUNCTION   : translateMetadataToParameters
3243 *
3244 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3245 *
3246 *
3247 * PARAMETERS :
3248 *   @request  : request sent from framework
3249 *
3250 *
3251 * RETURN     : success: NO_ERROR
3252 *              failure:
3253 *==========================================================================*/
3254int QCamera3HardwareInterface::translateMetadataToParameters
3255                                  (const camera3_capture_request_t *request)
3256{
3257    int rc = 0;
3258    CameraMetadata frame_settings;
3259    frame_settings = request->settings;
3260
3261    /* Do not change the order of the following list unless you know what you are
3262     * doing.
3263     * The order is laid out in such a way that parameters in the front of the table
3264     * may be used to override the parameters later in the table. Examples are:
3265     * 1. META_MODE should precede AEC/AWB/AF MODE
3266     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3267     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3268     * 4. Any mode should precede it's corresponding settings
3269     */
3270    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3271        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3272        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3273                sizeof(metaMode), &metaMode);
3274        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3275           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3276           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3277                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3278                                             fwk_sceneMode);
3279           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3280                sizeof(sceneMode), &sceneMode);
3281        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3282           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
3283           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3284                sizeof(sceneMode), &sceneMode);
3285        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3286           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
3287           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3288                sizeof(sceneMode), &sceneMode);
3289        }
3290    }
3291
3292    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3293        uint8_t fwk_aeMode =
3294            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3295        uint8_t aeMode;
3296        int32_t redeye;
3297
3298        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3299            aeMode = CAM_AE_MODE_OFF;
3300        } else {
3301            aeMode = CAM_AE_MODE_ON;
3302        }
3303        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3304            redeye = 1;
3305        } else {
3306            redeye = 0;
3307        }
3308
3309        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3310                                          sizeof(AE_FLASH_MODE_MAP),
3311                                          fwk_aeMode);
3312        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3313                sizeof(aeMode), &aeMode);
3314        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3315                sizeof(flashMode), &flashMode);
3316        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3317                sizeof(redeye), &redeye);
3318    }
3319
3320    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3321        uint8_t fwk_whiteLevel =
3322            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3323        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3324                sizeof(WHITE_BALANCE_MODES_MAP),
3325                fwk_whiteLevel);
3326        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3327                sizeof(whiteLevel), &whiteLevel);
3328    }
3329
3330    float focalDistance = -1.0;
3331    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3332        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3333        rc = AddSetParmEntryToBatch(mParameters,
3334                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3335                sizeof(focalDistance), &focalDistance);
3336    }
3337
3338    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3339        uint8_t fwk_focusMode =
3340            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3341        uint8_t focusMode;
3342        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3343            focusMode = CAM_FOCUS_MODE_INFINITY;
3344        } else{
3345         focusMode = lookupHalName(FOCUS_MODES_MAP,
3346                                   sizeof(FOCUS_MODES_MAP),
3347                                   fwk_focusMode);
3348        }
3349        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3350                sizeof(focusMode), &focusMode);
3351    }
3352
3353    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3354        int32_t antibandingMode =
3355            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3356        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3357                sizeof(antibandingMode), &antibandingMode);
3358    }
3359
3360    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3361        int32_t expCompensation = frame_settings.find(
3362            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3363        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3364            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3365        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3366            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3367        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3368          sizeof(expCompensation), &expCompensation);
3369    }
3370
3371    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3372        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3373        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3374                sizeof(aeLock), &aeLock);
3375    }
3376    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3377        cam_fps_range_t fps_range;
3378        fps_range.min_fps =
3379            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3380        fps_range.max_fps =
3381            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3382        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3383                sizeof(fps_range), &fps_range);
3384    }
3385
3386    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3387        uint8_t awbLock =
3388            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3389        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3390                sizeof(awbLock), &awbLock);
3391    }
3392
3393    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3394        uint8_t fwk_effectMode =
3395            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3396        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3397                sizeof(EFFECT_MODES_MAP),
3398                fwk_effectMode);
3399        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3400                sizeof(effectMode), &effectMode);
3401    }
3402
3403    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3404        uint8_t colorCorrectMode =
3405            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3406        rc =
3407            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3408                    sizeof(colorCorrectMode), &colorCorrectMode);
3409    }
3410
3411    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3412        cam_color_correct_gains_t colorCorrectGains;
3413        for (int i = 0; i < 4; i++) {
3414            colorCorrectGains.gains[i] =
3415                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3416        }
3417        rc =
3418            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3419                    sizeof(colorCorrectGains), &colorCorrectGains);
3420    }
3421
3422    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3423        cam_color_correct_matrix_t colorCorrectTransform;
3424        cam_rational_type_t transform_elem;
3425        int num = 0;
3426        for (int i = 0; i < 3; i++) {
3427           for (int j = 0; j < 3; j++) {
3428              transform_elem.numerator =
3429                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3430              transform_elem.denominator =
3431                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3432              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3433              num++;
3434           }
3435        }
3436        rc =
3437            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3438                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3439    }
3440
3441    cam_trigger_t aecTrigger;
3442    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3443    aecTrigger.trigger_id = -1;
3444    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3445        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3446        aecTrigger.trigger =
3447            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3448        aecTrigger.trigger_id =
3449            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3450    }
3451    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3452                                sizeof(aecTrigger), &aecTrigger);
3453
3454    /*af_trigger must come with a trigger id*/
3455    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3456        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3457        cam_trigger_t af_trigger;
3458        af_trigger.trigger =
3459            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3460        af_trigger.trigger_id =
3461            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3462        rc = AddSetParmEntryToBatch(mParameters,
3463                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3464    }
3465
3466    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3467        int32_t demosaic =
3468            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3469        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3470                sizeof(demosaic), &demosaic);
3471    }
3472
3473    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3474        cam_edge_application_t edge_application;
3475        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3476        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3477            edge_application.sharpness = 0;
3478        } else {
3479            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3480                uint8_t edgeStrength =
3481                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
3482                edge_application.sharpness = (int32_t)edgeStrength;
3483            } else {
3484                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3485            }
3486        }
3487        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3488                sizeof(edge_application), &edge_application);
3489    }
3490
3491    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3492        int32_t respectFlashMode = 1;
3493        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3494            uint8_t fwk_aeMode =
3495                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3496            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3497                respectFlashMode = 0;
3498                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3499                    __func__);
3500            }
3501        }
3502        if (respectFlashMode) {
3503            uint8_t flashMode =
3504                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3505            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3506                                          sizeof(FLASH_MODES_MAP),
3507                                          flashMode);
3508            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3509            // To check: CAM_INTF_META_FLASH_MODE usage
3510            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3511                          sizeof(flashMode), &flashMode);
3512        }
3513    }
3514
3515    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3516        uint8_t flashPower =
3517            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3518        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3519                sizeof(flashPower), &flashPower);
3520    }
3521
3522    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3523        int64_t flashFiringTime =
3524            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3525        rc = AddSetParmEntryToBatch(mParameters,
3526                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3527    }
3528
3529    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3530        uint8_t geometricMode =
3531            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3532        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3533                sizeof(geometricMode), &geometricMode);
3534    }
3535
3536    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3537        uint8_t geometricStrength =
3538            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3539        rc = AddSetParmEntryToBatch(mParameters,
3540                CAM_INTF_META_GEOMETRIC_STRENGTH,
3541                sizeof(geometricStrength), &geometricStrength);
3542    }
3543
3544    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3545        uint8_t hotPixelMode =
3546            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3547        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3548                sizeof(hotPixelMode), &hotPixelMode);
3549    }
3550
3551    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3552        float lensAperture =
3553            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3554        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3555                sizeof(lensAperture), &lensAperture);
3556    }
3557
3558    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3559        float filterDensity =
3560            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3561        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3562                sizeof(filterDensity), &filterDensity);
3563    }
3564
3565    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3566        float focalLength =
3567            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3568        rc = AddSetParmEntryToBatch(mParameters,
3569                CAM_INTF_META_LENS_FOCAL_LENGTH,
3570                sizeof(focalLength), &focalLength);
3571    }
3572
3573    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3574        uint8_t optStabMode =
3575            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3576        rc = AddSetParmEntryToBatch(mParameters,
3577                CAM_INTF_META_LENS_OPT_STAB_MODE,
3578                sizeof(optStabMode), &optStabMode);
3579    }
3580
3581    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3582        uint8_t noiseRedMode =
3583            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3584        rc = AddSetParmEntryToBatch(mParameters,
3585                CAM_INTF_META_NOISE_REDUCTION_MODE,
3586                sizeof(noiseRedMode), &noiseRedMode);
3587    }
3588
3589    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3590        uint8_t noiseRedStrength =
3591            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3592        rc = AddSetParmEntryToBatch(mParameters,
3593                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3594                sizeof(noiseRedStrength), &noiseRedStrength);
3595    }
3596
3597    cam_crop_region_t scalerCropRegion;
3598    bool scalerCropSet = false;
3599    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3600        scalerCropRegion.left =
3601            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3602        scalerCropRegion.top =
3603            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3604        scalerCropRegion.width =
3605            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3606        scalerCropRegion.height =
3607            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3608        rc = AddSetParmEntryToBatch(mParameters,
3609                CAM_INTF_META_SCALER_CROP_REGION,
3610                sizeof(scalerCropRegion), &scalerCropRegion);
3611        scalerCropSet = true;
3612    }
3613
3614    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3615        int64_t sensorExpTime =
3616            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3617        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3618        rc = AddSetParmEntryToBatch(mParameters,
3619                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3620                sizeof(sensorExpTime), &sensorExpTime);
3621    }
3622
3623    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3624        int64_t sensorFrameDuration =
3625            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3626        int64_t minFrameDuration = getMinFrameDuration(request);
3627        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3628        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3629            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3630        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3631        rc = AddSetParmEntryToBatch(mParameters,
3632                CAM_INTF_META_SENSOR_FRAME_DURATION,
3633                sizeof(sensorFrameDuration), &sensorFrameDuration);
3634    }
3635
3636    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3637        int32_t sensorSensitivity =
3638            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3639        if (sensorSensitivity <
3640                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3641            sensorSensitivity =
3642                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3643        if (sensorSensitivity >
3644                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3645            sensorSensitivity =
3646                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3647        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3648        rc = AddSetParmEntryToBatch(mParameters,
3649                CAM_INTF_META_SENSOR_SENSITIVITY,
3650                sizeof(sensorSensitivity), &sensorSensitivity);
3651    }
3652
3653    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3654        int32_t shadingMode =
3655            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3656        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3657                sizeof(shadingMode), &shadingMode);
3658    }
3659
3660    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3661        uint8_t shadingStrength =
3662            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3663        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3664                sizeof(shadingStrength), &shadingStrength);
3665    }
3666
3667    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3668        uint8_t fwk_facedetectMode =
3669            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3670        uint8_t facedetectMode =
3671            lookupHalName(FACEDETECT_MODES_MAP,
3672                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3673        rc = AddSetParmEntryToBatch(mParameters,
3674                CAM_INTF_META_STATS_FACEDETECT_MODE,
3675                sizeof(facedetectMode), &facedetectMode);
3676    }
3677
3678    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3679        uint8_t histogramMode =
3680            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3681        rc = AddSetParmEntryToBatch(mParameters,
3682                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3683                sizeof(histogramMode), &histogramMode);
3684    }
3685
3686    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3687        uint8_t sharpnessMapMode =
3688            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3689        rc = AddSetParmEntryToBatch(mParameters,
3690                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3691                sizeof(sharpnessMapMode), &sharpnessMapMode);
3692    }
3693
3694    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3695        uint8_t tonemapMode =
3696            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3697        rc = AddSetParmEntryToBatch(mParameters,
3698                CAM_INTF_META_TONEMAP_MODE,
3699                sizeof(tonemapMode), &tonemapMode);
3700    }
3701    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3702    /*All tonemap channels will have the same number of points*/
3703    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3704        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3705        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3706        cam_rgb_tonemap_curves tonemapCurves;
3707        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3708
3709        /* ch0 = G*/
3710        int point = 0;
3711        cam_tonemap_curve_t tonemapCurveGreen;
3712        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3713            for (int j = 0; j < 2; j++) {
3714               tonemapCurveGreen.tonemap_points[i][j] =
3715                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3716               point++;
3717            }
3718        }
3719        tonemapCurves.curves[0] = tonemapCurveGreen;
3720
3721        /* ch 1 = B */
3722        point = 0;
3723        cam_tonemap_curve_t tonemapCurveBlue;
3724        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3725            for (int j = 0; j < 2; j++) {
3726               tonemapCurveBlue.tonemap_points[i][j] =
3727                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3728               point++;
3729            }
3730        }
3731        tonemapCurves.curves[1] = tonemapCurveBlue;
3732
3733        /* ch 2 = R */
3734        point = 0;
3735        cam_tonemap_curve_t tonemapCurveRed;
3736        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3737            for (int j = 0; j < 2; j++) {
3738               tonemapCurveRed.tonemap_points[i][j] =
3739                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3740               point++;
3741            }
3742        }
3743        tonemapCurves.curves[2] = tonemapCurveRed;
3744
3745        rc = AddSetParmEntryToBatch(mParameters,
3746                CAM_INTF_META_TONEMAP_CURVES,
3747                sizeof(tonemapCurves), &tonemapCurves);
3748    }
3749
3750    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3751        uint8_t captureIntent =
3752            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3753        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3754                sizeof(captureIntent), &captureIntent);
3755    }
3756
3757    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3758        uint8_t blackLevelLock =
3759            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3760        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3761                sizeof(blackLevelLock), &blackLevelLock);
3762    }
3763
3764    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3765        uint8_t lensShadingMapMode =
3766            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3767        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3768                sizeof(lensShadingMapMode), &lensShadingMapMode);
3769    }
3770
3771    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3772        cam_area_t roi;
3773        bool reset = true;
3774        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3775        if (scalerCropSet) {
3776            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3777        }
3778        if (reset) {
3779            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3780                    sizeof(roi), &roi);
3781        }
3782    }
3783
3784    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3785        cam_area_t roi;
3786        bool reset = true;
3787        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3788        if (scalerCropSet) {
3789            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3790        }
3791        if (reset) {
3792            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3793                    sizeof(roi), &roi);
3794        }
3795    }
3796
3797    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3798        cam_area_t roi;
3799        bool reset = true;
3800        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3801        if (scalerCropSet) {
3802            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3803        }
3804        if (reset) {
3805            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3806                    sizeof(roi), &roi);
3807        }
3808    }
3809    return rc;
3810}
3811
3812/*===========================================================================
3813 * FUNCTION   : getJpegSettings
3814 *
3815 * DESCRIPTION: save the jpeg settings in the HAL
3816 *
3817 *
3818 * PARAMETERS :
3819 *   @settings  : frame settings information from framework
3820 *
3821 *
3822 * RETURN     : success: NO_ERROR
3823 *              failure:
3824 *==========================================================================*/
3825int QCamera3HardwareInterface::getJpegSettings
3826                                  (const camera_metadata_t *settings)
3827{
3828    if (mJpegSettings) {
3829        if (mJpegSettings->gps_timestamp) {
3830            free(mJpegSettings->gps_timestamp);
3831            mJpegSettings->gps_timestamp = NULL;
3832        }
3833        if (mJpegSettings->gps_coordinates) {
3834            for (int i = 0; i < 3; i++) {
3835                free(mJpegSettings->gps_coordinates[i]);
3836                mJpegSettings->gps_coordinates[i] = NULL;
3837            }
3838        }
3839        free(mJpegSettings);
3840        mJpegSettings = NULL;
3841    }
3842    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3843    CameraMetadata jpeg_settings;
3844    jpeg_settings = settings;
3845
3846    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3847        mJpegSettings->jpeg_orientation =
3848            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3849    } else {
3850        mJpegSettings->jpeg_orientation = 0;
3851    }
3852    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3853        mJpegSettings->jpeg_quality =
3854            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3855    } else {
3856        mJpegSettings->jpeg_quality = 85;
3857    }
3858    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3859        mJpegSettings->thumbnail_size.width =
3860            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3861        mJpegSettings->thumbnail_size.height =
3862            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3863    } else {
3864        mJpegSettings->thumbnail_size.width = 0;
3865        mJpegSettings->thumbnail_size.height = 0;
3866    }
3867    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3868        for (int i = 0; i < 3; i++) {
3869            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3870            *(mJpegSettings->gps_coordinates[i]) =
3871                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3872        }
3873    } else{
3874       for (int i = 0; i < 3; i++) {
3875            mJpegSettings->gps_coordinates[i] = NULL;
3876        }
3877    }
3878
3879    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3880        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3881        *(mJpegSettings->gps_timestamp) =
3882            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3883    } else {
3884        mJpegSettings->gps_timestamp = NULL;
3885    }
3886
3887    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3888        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3889        for (int i = 0; i < len; i++) {
3890            mJpegSettings->gps_processing_method[i] =
3891                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3892        }
3893        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3894            mJpegSettings->gps_processing_method[len] = '\0';
3895        }
3896    } else {
3897        mJpegSettings->gps_processing_method[0] = '\0';
3898    }
3899
3900    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3901        mJpegSettings->sensor_sensitivity =
3902            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3903    } else {
3904        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3905    }
3906
3907    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3908
3909    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3910        mJpegSettings->lens_focal_length =
3911            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3912    }
3913    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3914        mJpegSettings->exposure_compensation =
3915            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3916    }
3917    mJpegSettings->sharpness = 10; //default value
3918    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3919        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3920        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3921            mJpegSettings->sharpness = 0;
3922        }
3923    }
3924    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3925    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3926    mJpegSettings->is_jpeg_format = true;
3927    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3928    return 0;
3929}
3930
3931/*===========================================================================
3932 * FUNCTION   : captureResultCb
3933 *
3934 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3935 *
3936 * PARAMETERS :
3937 *   @frame  : frame information from mm-camera-interface
3938 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3939 *   @userdata: userdata
3940 *
3941 * RETURN     : NONE
3942 *==========================================================================*/
3943void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3944                camera3_stream_buffer_t *buffer,
3945                uint32_t frame_number, void *userdata)
3946{
3947    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3948    if (hw == NULL) {
3949        ALOGE("%s: Invalid hw %p", __func__, hw);
3950        return;
3951    }
3952
3953    hw->captureResultCb(metadata, buffer, frame_number);
3954    return;
3955}
3956
3957
3958/*===========================================================================
3959 * FUNCTION   : initialize
3960 *
3961 * DESCRIPTION: Pass framework callback pointers to HAL
3962 *
3963 * PARAMETERS :
3964 *
3965 *
3966 * RETURN     : Success : 0
3967 *              Failure: -ENODEV
3968 *==========================================================================*/
3969
3970int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3971                                  const camera3_callback_ops_t *callback_ops)
3972{
3973    ALOGV("%s: E", __func__);
3974    QCamera3HardwareInterface *hw =
3975        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3976    if (!hw) {
3977        ALOGE("%s: NULL camera device", __func__);
3978        return -ENODEV;
3979    }
3980
3981    int rc = hw->initialize(callback_ops);
3982    ALOGV("%s: X", __func__);
3983    return rc;
3984}
3985
3986/*===========================================================================
3987 * FUNCTION   : configure_streams
3988 *
 * DESCRIPTION: Static entry point that forwards a stream configuration
 *              request to the HAL instance's configureStreams().
3990 *
3991 * PARAMETERS :
3992 *
3993 *
3994 * RETURN     : Success: 0
3995 *              Failure: -EINVAL (if stream configuration is invalid)
3996 *                       -ENODEV (fatal error)
3997 *==========================================================================*/
3998
3999int QCamera3HardwareInterface::configure_streams(
4000        const struct camera3_device *device,
4001        camera3_stream_configuration_t *stream_list)
4002{
4003    ALOGV("%s: E", __func__);
4004    QCamera3HardwareInterface *hw =
4005        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4006    if (!hw) {
4007        ALOGE("%s: NULL camera device", __func__);
4008        return -ENODEV;
4009    }
4010    int rc = hw->configureStreams(stream_list);
4011    ALOGV("%s: X", __func__);
4012    return rc;
4013}
4014
4015/*===========================================================================
4016 * FUNCTION   : register_stream_buffers
4017 *
4018 * DESCRIPTION: Register stream buffers with the device
4019 *
4020 * PARAMETERS :
4021 *
4022 * RETURN     :
4023 *==========================================================================*/
4024int QCamera3HardwareInterface::register_stream_buffers(
4025        const struct camera3_device *device,
4026        const camera3_stream_buffer_set_t *buffer_set)
4027{
4028    ALOGV("%s: E", __func__);
4029    QCamera3HardwareInterface *hw =
4030        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4031    if (!hw) {
4032        ALOGE("%s: NULL camera device", __func__);
4033        return -ENODEV;
4034    }
4035    int rc = hw->registerStreamBuffers(buffer_set);
4036    ALOGV("%s: X", __func__);
4037    return rc;
4038}
4039
4040/*===========================================================================
4041 * FUNCTION   : construct_default_request_settings
4042 *
4043 * DESCRIPTION: Configure a settings buffer to meet the required use case
4044 *
4045 * PARAMETERS :
4046 *
4047 *
4048 * RETURN     : Success: Return valid metadata
4049 *              Failure: Return NULL
4050 *==========================================================================*/
4051const camera_metadata_t* QCamera3HardwareInterface::
4052    construct_default_request_settings(const struct camera3_device *device,
4053                                        int type)
4054{
4055
4056    ALOGV("%s: E", __func__);
4057    camera_metadata_t* fwk_metadata = NULL;
4058    QCamera3HardwareInterface *hw =
4059        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4060    if (!hw) {
4061        ALOGE("%s: NULL camera device", __func__);
4062        return NULL;
4063    }
4064
4065    fwk_metadata = hw->translateCapabilityToMetadata(type);
4066
4067    ALOGV("%s: X", __func__);
4068    return fwk_metadata;
4069}
4070
4071/*===========================================================================
4072 * FUNCTION   : process_capture_request
4073 *
 * DESCRIPTION: Static entry point that forwards a capture request to the
 *              HAL instance's processCaptureRequest().
4075 *
4076 * PARAMETERS :
4077 *
4078 *
4079 * RETURN     :
4080 *==========================================================================*/
4081int QCamera3HardwareInterface::process_capture_request(
4082                    const struct camera3_device *device,
4083                    camera3_capture_request_t *request)
4084{
4085    ALOGV("%s: E", __func__);
4086    QCamera3HardwareInterface *hw =
4087        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4088    if (!hw) {
4089        ALOGE("%s: NULL camera device", __func__);
4090        return -EINVAL;
4091    }
4092
4093    int rc = hw->processCaptureRequest(request);
4094    ALOGV("%s: X", __func__);
4095    return rc;
4096}
4097
4098/*===========================================================================
4099 * FUNCTION   : get_metadata_vendor_tag_ops
4100 *
4101 * DESCRIPTION:
4102 *
4103 * PARAMETERS :
4104 *
4105 *
4106 * RETURN     :
4107 *==========================================================================*/
4108
4109void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
4110                const struct camera3_device *device,
4111                vendor_tag_query_ops_t* ops)
4112{
4113    ALOGV("%s: E", __func__);
4114    QCamera3HardwareInterface *hw =
4115        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4116    if (!hw) {
4117        ALOGE("%s: NULL camera device", __func__);
4118        return;
4119    }
4120
4121    hw->getMetadataVendorTagOps(ops);
4122    ALOGV("%s: X", __func__);
4123    return;
4124}
4125
4126/*===========================================================================
4127 * FUNCTION   : dump
4128 *
4129 * DESCRIPTION:
4130 *
4131 * PARAMETERS :
4132 *
4133 *
4134 * RETURN     :
4135 *==========================================================================*/
4136
4137void QCamera3HardwareInterface::dump(
4138                const struct camera3_device *device, int fd)
4139{
4140    ALOGV("%s: E", __func__);
4141    QCamera3HardwareInterface *hw =
4142        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4143    if (!hw) {
4144        ALOGE("%s: NULL camera device", __func__);
4145        return;
4146    }
4147
4148    hw->dump(fd);
4149    ALOGV("%s: X", __func__);
4150    return;
4151}
4152
4153/*===========================================================================
4154 * FUNCTION   : flush
4155 *
4156 * DESCRIPTION:
4157 *
4158 * PARAMETERS :
4159 *
4160 *
4161 * RETURN     :
4162 *==========================================================================*/
4163
4164int QCamera3HardwareInterface::flush(
4165                const struct camera3_device *device)
4166{
4167    int rc;
4168    ALOGV("%s: E", __func__);
4169    QCamera3HardwareInterface *hw =
4170        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4171    if (!hw) {
4172        ALOGE("%s: NULL camera device", __func__);
4173        return -EINVAL;
4174    }
4175
4176    rc = hw->flush();
4177    ALOGV("%s: X", __func__);
4178    return rc;
4179}
4180
4181/*===========================================================================
4182 * FUNCTION   : close_camera_device
4183 *
 * DESCRIPTION: Destroy the HAL instance for this device and release the
 *              active camera session slot.
4185 *
4186 * PARAMETERS :
4187 *
4188 *
4189 * RETURN     :
4190 *==========================================================================*/
4191int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4192{
4193    ALOGV("%s: E", __func__);
4194    int ret = NO_ERROR;
4195    QCamera3HardwareInterface *hw =
4196        reinterpret_cast<QCamera3HardwareInterface *>(
4197            reinterpret_cast<camera3_device_t *>(device)->priv);
4198    if (!hw) {
4199        ALOGE("NULL camera device");
4200        return BAD_VALUE;
4201    }
4202    delete hw;
4203
4204    pthread_mutex_lock(&mCameraSessionLock);
4205    mCameraSessionActive = 0;
4206    pthread_mutex_unlock(&mCameraSessionLock);
4207    ALOGV("%s: X", __func__);
4208    return ret;
4209}
4210
4211/*===========================================================================
4212 * FUNCTION   : getWaveletDenoiseProcessPlate
4213 *
4214 * DESCRIPTION: query wavelet denoise process plate
4215 *
4216 * PARAMETERS : None
4217 *
4218 * RETURN     : WNR prcocess plate vlaue
4219 *==========================================================================*/
4220cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4221{
4222    char prop[PROPERTY_VALUE_MAX];
4223    memset(prop, 0, sizeof(prop));
4224    property_get("persist.denoise.process.plates", prop, "0");
4225    int processPlate = atoi(prop);
4226    switch(processPlate) {
4227    case 0:
4228        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4229    case 1:
4230        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4231    case 2:
4232        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4233    case 3:
4234        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4235    default:
4236        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4237    }
4238}
4239
4240/*===========================================================================
4241 * FUNCTION   : needRotationReprocess
4242 *
4243 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4244 *
4245 * PARAMETERS : none
4246 *
4247 * RETURN     : true: needed
4248 *              false: no need
4249 *==========================================================================*/
4250bool QCamera3HardwareInterface::needRotationReprocess()
4251{
4252
4253    if (!mJpegSettings->is_jpeg_format) {
4254        // RAW image, no need to reprocess
4255        return false;
4256    }
4257
4258    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4259        mJpegSettings->jpeg_orientation > 0) {
4260        // current rotation is not zero, and pp has the capability to process rotation
4261        ALOGD("%s: need do reprocess for rotation", __func__);
4262        return true;
4263    }
4264
4265    return false;
4266}
4267
4268/*===========================================================================
4269 * FUNCTION   : needReprocess
4270 *
4271 * DESCRIPTION: if reprocess in needed
4272 *
4273 * PARAMETERS : none
4274 *
4275 * RETURN     : true: needed
4276 *              false: no need
4277 *==========================================================================*/
4278bool QCamera3HardwareInterface::needReprocess()
4279{
4280    if (!mJpegSettings->is_jpeg_format) {
4281        // RAW image, no need to reprocess
4282        return false;
4283    }
4284
4285    if ((mJpegSettings->min_required_pp_mask > 0) ||
4286         isWNREnabled()) {
4287        // TODO: add for ZSL HDR later
4288        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4289        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4290        return true;
4291    }
4292    return needRotationReprocess();
4293}
4294
4295/*===========================================================================
4296 * FUNCTION   : addOnlineReprocChannel
4297 *
4298 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4299 *              coming from input channel
4300 *
4301 * PARAMETERS :
4302 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4303 *
4304 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4305 *==========================================================================*/
4306QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4307              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4308{
4309    int32_t rc = NO_ERROR;
4310    QCamera3ReprocessChannel *pChannel = NULL;
4311    if (pInputChannel == NULL) {
4312        ALOGE("%s: input channel obj is NULL", __func__);
4313        return NULL;
4314    }
4315
4316    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4317            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4318    if (NULL == pChannel) {
4319        ALOGE("%s: no mem for reprocess channel", __func__);
4320        return NULL;
4321    }
4322
4323    // Capture channel, only need snapshot and postview streams start together
4324    mm_camera_channel_attr_t attr;
4325    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4326    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4327    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4328    rc = pChannel->initialize();
4329    if (rc != NO_ERROR) {
4330        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4331        delete pChannel;
4332        return NULL;
4333    }
4334
4335    // pp feature config
4336    cam_pp_feature_config_t pp_config;
4337    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4338    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4339        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4340        pp_config.sharpness = mJpegSettings->sharpness;
4341    }
4342
4343    if (isWNREnabled()) {
4344        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4345        pp_config.denoise2d.denoise_enable = 1;
4346        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4347    }
4348    if (needRotationReprocess()) {
4349        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4350        int rotation = mJpegSettings->jpeg_orientation;
4351        if (rotation == 0) {
4352            pp_config.rotation = ROTATE_0;
4353        } else if (rotation == 90) {
4354            pp_config.rotation = ROTATE_90;
4355        } else if (rotation == 180) {
4356            pp_config.rotation = ROTATE_180;
4357        } else if (rotation == 270) {
4358            pp_config.rotation = ROTATE_270;
4359        }
4360    }
4361
4362   rc = pChannel->addReprocStreamsFromSource(pp_config,
4363                                             pInputChannel,
4364                                             mMetadataChannel);
4365
4366    if (rc != NO_ERROR) {
4367        delete pChannel;
4368        return NULL;
4369    }
4370    return pChannel;
4371}
4372
// Upper bound of unmatched frames kept in a channel's matching queue,
// taken from the sensor capability's minimum post-processing buffer count.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4377
// Whether wavelet noise reduction is supported by this camera, per the
// capability table queried from the backend.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4381
4382}; //end namespace qcamera
4383