QCamera3HWI.cpp revision 020cc22e9d3b14bffb22f7c4ace81201d833aeac
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48
// Convenience max macro; arguments are parenthesized but may be evaluated twice.
49#define MAX(a, b) ((a) > (b) ? (a) : (b))
50
// Shorthand for fetching a buffer pointer out of a QCamera3Mem-style object.
51#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables; this file reads gCamCapability[mCameraId]
// without NULL checks, so it is presumably populated before a device is
// constructed — TODO confirm against the module init path (not in this chunk).
52cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
53parm_buffer_t *prevSettings;
54const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
55
// Serializes open/close across all HAL instances; openCamera(hw_device)
// uses it together with mCameraSessionActive to enforce a single session.
56pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
57    PTHREAD_MUTEX_INITIALIZER;
58unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
59
// Translation tables between android.control framework enum values and the
// corresponding mm-camera backend (CAM_*) enum values.
60const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
61    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
62    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
63    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
64    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
65    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
66    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
67    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
68    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
69    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
70};
71
72const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
73    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
74    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
75    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
76    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
77    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
78    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
79    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
80    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
81    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
82};
83
84const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
85    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
86    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
87    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
88    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
89    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
90    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
91    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
92    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
93    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
// STEADYPHOTO maps onto the backend's anti-shake scene mode.
94    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
95    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
96    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
97    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
98    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
99    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
100};
101
// Note: AF_MODE_OFF is represented as the backend's fixed-focus mode.
102const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
103    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
104    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
105    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
106    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
107    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
108    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
109};
110
111const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
112    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
113    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
114    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
115    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
116};
117
// AE mode -> flash mode: both AE_MODE_OFF and plain AE_MODE_ON intentionally
// disable the flash; REDEYE shares the backend's AUTO flash mode.
118const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
119    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
120    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
121    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
122    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
123    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
124};
125
126const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
127    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
128    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
129    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
130};
131
// SIMPLE face-detect mode is not mapped here; only OFF and FULL are supported.
132const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
133    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
134    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
135};
136
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the trailing
// 0,0 entry means "no thumbnail".
137const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
138                                             320, 240, 176, 144, 0, 0};
139
// camera3_device_ops vtable handed to the framework; each entry is a static
// trampoline that recovers the QCamera3HardwareInterface instance from
// device->priv. Uses GNU designated-initializer (label:) syntax.
140camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
141    initialize:                         QCamera3HardwareInterface::initialize,
142    configure_streams:                  QCamera3HardwareInterface::configure_streams,
143    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
144    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
145    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
146    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
147    dump:                               QCamera3HardwareInterface::dump,
148};
149
150
151/*===========================================================================
152 * FUNCTION   : QCamera3HardwareInterface
153 *
154 * DESCRIPTION: constructor of QCamera3HardwareInterface
155 *
156 * PARAMETERS :
157 *   @cameraId  : camera ID
158 *
159 * RETURN     : none
160 *==========================================================================*/
161QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
162    : mCameraId(cameraId),
163      mCameraHandle(NULL),
164      mCameraOpened(false),
165      mCameraInitialized(false),
166      mCallbackOps(NULL),
167      mInputStream(NULL),
168      mMetadataChannel(NULL),
169      mPictureChannel(NULL),
170      mFirstRequest(false),
171      mParamHeap(NULL),
172      mParameters(NULL),
173      mJpegSettings(NULL),
174      mIsZslMode(false),
175      mMinProcessedFrameDuration(0),
176      mMinJpegFrameDuration(0),
177      mMinRawFrameDuration(0),
178      m_pPowerModule(NULL)
179{
    // Wire up the camera3_device_t the framework will talk to; priv lets the
    // static ops trampolines recover this instance.
180    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
181    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
182    mCameraDevice.common.close = close_camera_device;
183    mCameraDevice.ops = &mCameraOps;
184    mCameraDevice.priv = this;
    // gCamCapability[cameraId] is dereferenced unchecked here — presumably
    // populated during module/static-info init before construction; TODO confirm.
185    gCamCapability[cameraId]->version = CAM_HAL_V3;
186    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
187    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
188    gCamCapability[cameraId]->min_num_pp_bufs = 3;
189
    // Request-tracking synchronization primitives used by the capture path.
190    pthread_cond_init(&mRequestCond, NULL);
191    mPendingRequest = 0;
192    mCurrentRequestId = -1;
193    pthread_mutex_init(&mMutex, NULL);
194
    // Default request templates are built lazily; start with all slots empty.
195    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
196        mDefaultMetadata[i] = NULL;
197
    // Optional power-hint module; failure to find it is non-fatal (hints are
    // simply skipped in openCamera/closeCamera).
198#ifdef HAS_MULTIMEDIA_HINTS
199    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
200        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
201    }
202#endif
203}
204
205/*===========================================================================
206 * FUNCTION   : ~QCamera3HardwareInterface
207 *
208 * DESCRIPTION: destructor of QCamera3HardwareInterface
209 *
210 * PARAMETERS : none
211 *
212 * RETURN     : none
213 *==========================================================================*/
214QCamera3HardwareInterface::~QCamera3HardwareInterface()
215{
216    ALOGV("%s: E", __func__);
217    /* We need to stop all streams before deleting any stream */
218    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
219        it != mStreamInfo.end(); it++) {
220        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
221        if (channel)
222           channel->stop();
223    }
    // Second pass: destroy channels and release the stream_info_t records.
    // stream_info_t is malloc'd in configureStreams, hence free() here.
224    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
225        it != mStreamInfo.end(); it++) {
226        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
227        if (channel)
228            delete channel;
229        free (*it);
230    }
231
    // mPictureChannel aliases one of the channels deleted above; just clear it.
232    mPictureChannel = NULL;
233
234    if (mJpegSettings != NULL) {
235        free(mJpegSettings);
236        mJpegSettings = NULL;
237    }
238
239    /* Clean up all channels */
    // mCameraInitialized is only set after the metadata channel was created in
    // initialize(), so mMetadataChannel is assumed non-NULL inside this guard.
240    if (mCameraInitialized) {
241        mMetadataChannel->stop();
242        delete mMetadataChannel;
243        mMetadataChannel = NULL;
244        deinitParameters();
245    }
246
247    if (mCameraOpened)
248        closeCamera();
249
250    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
251        if (mDefaultMetadata[i])
252            free_camera_metadata(mDefaultMetadata[i]);
253
254    pthread_cond_destroy(&mRequestCond);
255
256    pthread_mutex_destroy(&mMutex);
257    ALOGV("%s: X", __func__);
258}
259
260/*===========================================================================
261 * FUNCTION   : openCamera
262 *
263 * DESCRIPTION: open camera
264 *
265 * PARAMETERS :
266 *   @hw_device  : double ptr for camera device struct
267 *
268 * RETURN     : int32_t type of status
269 *              NO_ERROR  -- success
270 *              none-zero failure code
271 *==========================================================================*/
272int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
273{
274    int rc = 0;
275    pthread_mutex_lock(&mCameraSessionLock);
276    if (mCameraSessionActive) {
277        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
278        pthread_mutex_unlock(&mCameraSessionLock);
279        return INVALID_OPERATION;
280    }
281
282    if (mCameraOpened) {
283        *hw_device = NULL;
284        return PERMISSION_DENIED;
285    }
286
287    rc = openCamera();
288    if (rc == 0) {
289        *hw_device = &mCameraDevice.common;
290        mCameraSessionActive = 1;
291    } else
292        *hw_device = NULL;
293
294#ifdef HAS_MULTIMEDIA_HINTS
295    if (rc == 0) {
296        if (m_pPowerModule) {
297            if (m_pPowerModule->powerHint) {
298                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
299                        (void *)"state=1");
300            }
301        }
302    }
303#endif
304    pthread_mutex_unlock(&mCameraSessionLock);
305    return rc;
306}
307
308/*===========================================================================
309 * FUNCTION   : openCamera
310 *
311 * DESCRIPTION: open camera
312 *
313 * PARAMETERS : none
314 *
315 * RETURN     : int32_t type of status
316 *              NO_ERROR  -- success
317 *              none-zero failure code
318 *==========================================================================*/
319int QCamera3HardwareInterface::openCamera()
320{
321    if (mCameraHandle) {
322        ALOGE("Failure: Camera already opened");
323        return ALREADY_EXISTS;
324    }
325    mCameraHandle = camera_open(mCameraId);
326    if (!mCameraHandle) {
327        ALOGE("camera_open failed.");
328        return UNKNOWN_ERROR;
329    }
330
331    mCameraOpened = true;
332
333    return NO_ERROR;
334}
335
336/*===========================================================================
337 * FUNCTION   : closeCamera
338 *
339 * DESCRIPTION: close camera
340 *
341 * PARAMETERS : none
342 *
343 * RETURN     : int32_t type of status
344 *              NO_ERROR  -- success
345 *              none-zero failure code
346 *==========================================================================*/
347int QCamera3HardwareInterface::closeCamera()
348{
349    int rc = NO_ERROR;
350
351    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
352    mCameraHandle = NULL;
353    mCameraOpened = false;
354
355#ifdef HAS_MULTIMEDIA_HINTS
356    if (rc == NO_ERROR) {
357        if (m_pPowerModule) {
358            if (m_pPowerModule->powerHint) {
359                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
360                        (void *)"state=0");
361            }
362        }
363    }
364#endif
365
366    return rc;
367}
368
369/*===========================================================================
370 * FUNCTION   : initialize
371 *
372 * DESCRIPTION: Initialize frameworks callback functions
373 *
374 * PARAMETERS :
375 *   @callback_ops : callback function to frameworks
376 *
377 * RETURN     :
378 *
379 *==========================================================================*/
380int QCamera3HardwareInterface::initialize(
381        const struct camera3_callback_ops *callback_ops)
382{
383    int rc;
384
385    pthread_mutex_lock(&mMutex);
386
387    rc = initParameters();
388    if (rc < 0) {
389        ALOGE("%s: initParamters failed %d", __func__, rc);
390       goto err1;
391    }
392    //Create metadata channel and initialize it
393    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
394                    mCameraHandle->ops, captureResultCb,
395                    &gCamCapability[mCameraId]->padding_info, this);
396    if (mMetadataChannel == NULL) {
397        ALOGE("%s: failed to allocate metadata channel", __func__);
398        rc = -ENOMEM;
399        goto err2;
400    }
401    rc = mMetadataChannel->initialize();
402    if (rc < 0) {
403        ALOGE("%s: metadata channel initialization failed", __func__);
404        goto err3;
405    }
406
407    mCallbackOps = callback_ops;
408
409    pthread_mutex_unlock(&mMutex);
410    mCameraInitialized = true;
411    return 0;
412
413err3:
414    delete mMetadataChannel;
415    mMetadataChannel = NULL;
416err2:
417    deinitParameters();
418err1:
419    pthread_mutex_unlock(&mMutex);
420    return rc;
421}
422
423/*===========================================================================
424 * FUNCTION   : configureStreams
425 *
426 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
427 *              and output streams.
428 *
429 * PARAMETERS :
430 *   @stream_list : streams to be configured
431 *
432 * RETURN     :
433 *
434 *==========================================================================*/
435int QCamera3HardwareInterface::configureStreams(
436        camera3_stream_configuration_t *streamList)
437{
438    int rc = 0;
439    pthread_mutex_lock(&mMutex);
440    // Sanity check stream_list
441    if (streamList == NULL) {
442        ALOGE("%s: NULL stream configuration", __func__);
443        pthread_mutex_unlock(&mMutex);
444        return BAD_VALUE;
445    }
446
447    if (streamList->streams == NULL) {
448        ALOGE("%s: NULL stream list", __func__);
449        pthread_mutex_unlock(&mMutex);
450        return BAD_VALUE;
451    }
452
453    if (streamList->num_streams < 1) {
454        ALOGE("%s: Bad number of streams requested: %d", __func__,
455                streamList->num_streams);
456        pthread_mutex_unlock(&mMutex);
457        return BAD_VALUE;
458    }
459
460    camera3_stream_t *inputStream = NULL;
461    camera3_stream_t *jpegStream = NULL;
462    /* first invalidate all the steams in the mStreamList
463     * if they appear again, they will be validated */
    // NOTE(review): channel is used without a NULL check here, unlike the
    // guarded loops in the destructor — presumably priv is always set for
    // previously configured streams; confirm.
464    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
465            it != mStreamInfo.end(); it++) {
466        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
467        channel->stop();
468        (*it)->status = INVALID;
469    }
470
    // Pass 1: classify each requested stream as RECONFIGURE (seen before) or
    // new (VALID), and detect the input/bidirectional and JPEG streams.
471    for (size_t i = 0; i < streamList->num_streams; i++) {
472        camera3_stream_t *newStream = streamList->streams[i];
473        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
474                __func__, newStream->stream_type, newStream->format,
475                 newStream->width, newStream->height);
476        //if the stream is in the mStreamList validate it
477        bool stream_exists = false;
478        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
479                it != mStreamInfo.end(); it++) {
480            if ((*it)->stream == newStream) {
481                QCamera3Channel *channel =
482                    (QCamera3Channel*)(*it)->stream->priv;
483                stream_exists = true;
484                (*it)->status = RECONFIGURE;
485                /*delete the channel object associated with the stream because
486                  we need to reconfigure*/
487                delete channel;
488                (*it)->stream->priv = NULL;
489            }
490        }
491        if (!stream_exists) {
492            //new stream
493            stream_info_t* stream_info;
            // NOTE(review): malloc return value is not checked before use.
494            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
495            stream_info->stream = newStream;
496            stream_info->status = VALID;
497            stream_info->registered = 0;
498            mStreamInfo.push_back(stream_info);
499        }
        // Only a single input-capable stream is allowed per configuration.
500        if (newStream->stream_type == CAMERA3_STREAM_INPUT
501                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
502            if (inputStream != NULL) {
503                ALOGE("%s: Multiple input streams requested!", __func__);
504                pthread_mutex_unlock(&mMutex);
505                return BAD_VALUE;
506            }
507            inputStream = newStream;
508        }
509        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
510            jpegStream = newStream;
511        }
512    }
513    mInputStream = inputStream;
514
515    /*clean up invalid streams*/
    // Streams not re-requested this time: free their channel, their HAL-side
    // copy of the buffer handle array, and the stream_info_t record.
516    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
517            it != mStreamInfo.end();) {
518        if(((*it)->status) == INVALID){
519            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
520            delete channel;
521            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
522            free(*it);
523            it = mStreamInfo.erase(it);
524        } else {
525            it++;
526        }
527    }
528
529    //mMetadataChannel->stop();
530
531    /* Allocate channel objects for the requested streams */
    // Pass 2: set gralloc usage flags and create a channel for every stream
    // that does not yet have one (newStream->priv == NULL).
532    for (size_t i = 0; i < streamList->num_streams; i++) {
533        camera3_stream_t *newStream = streamList->streams[i];
534        if (newStream->priv == NULL) {
535            //New stream, construct channel
536            switch (newStream->stream_type) {
537            case CAMERA3_STREAM_INPUT:
538                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
539                break;
540            case CAMERA3_STREAM_BIDIRECTIONAL:
541                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
542                    GRALLOC_USAGE_HW_CAMERA_WRITE;
543                break;
544            case CAMERA3_STREAM_OUTPUT:
545                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
546                break;
547            default:
548                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
549                break;
550            }
551
552            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
553                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
554                QCamera3Channel *channel;
555                switch (newStream->format) {
556                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
557                case HAL_PIXEL_FORMAT_YCbCr_420_888:
558                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    // A bidirectional stream alongside a JPEG stream enables
                    // ZSL: size the channel to the JPEG dimensions.
559                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
560                        jpegStream) {
561                        uint32_t width = jpegStream->width;
562                        uint32_t height = jpegStream->height;
563                        mIsZslMode = true;
564                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
565                            mCameraHandle->ops, captureResultCb,
566                            &gCamCapability[mCameraId]->padding_info, this, newStream,
567                            width, height);
568                    } else
569                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
570                            mCameraHandle->ops, captureResultCb,
571                            &gCamCapability[mCameraId]->padding_info, this, newStream);
572                    if (channel == NULL) {
573                        ALOGE("%s: allocation of channel failed", __func__);
574                        pthread_mutex_unlock(&mMutex);
575                        return -ENOMEM;
576                    }
577
578                    newStream->priv = channel;
579                    break;
580                case HAL_PIXEL_FORMAT_BLOB:
581                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
582                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
583                            mCameraHandle->ops, captureResultCb,
584                            &gCamCapability[mCameraId]->padding_info, this, newStream);
585                    if (mPictureChannel == NULL) {
586                        ALOGE("%s: allocation of channel failed", __func__);
587                        pthread_mutex_unlock(&mMutex);
588                        return -ENOMEM;
589                    }
590                    newStream->priv = (QCamera3Channel*)mPictureChannel;
591                    break;
592
593                //TODO: Add support for app consumed format?
594                default:
595                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
596                    break;
597                }
598            }
599        } else {
600            // Channel already exists for this stream
601            // Do nothing for now
602        }
603    }
604    /*For the streams to be reconfigured we need to register the buffers
605      since the framework wont*/
606    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
607            it != mStreamInfo.end(); it++) {
608        if ((*it)->status == RECONFIGURE) {
609            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
610            /*only register buffers for streams that have already been
611              registered*/
612            if ((*it)->registered) {
                // Registration failure is logged but deliberately not fatal.
613                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
614                        (*it)->buffer_set.buffers);
615                if (rc != NO_ERROR) {
616                    ALOGE("%s: Failed to register the buffers of old stream,\
617                            rc = %d", __func__, rc);
618                }
619                ALOGV("%s: channel %p has %d buffers",
620                        __func__, channel, (*it)->buffer_set.num_buffers);
621            }
622        }
623
        // Reset the pending-buffer count for every configured stream.
624        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
625        if (index == NAME_NOT_FOUND) {
626            mPendingBuffersMap.add((*it)->stream, 0);
627        } else {
628            mPendingBuffersMap.editValueAt(index) = 0;
629        }
630    }
631
632    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
633    mPendingRequestsList.clear();
634
635    //settings/parameters don't carry over for new configureStreams
636    memset(mParameters, 0, sizeof(parm_buffer_t));
637    mFirstRequest = true;
638
639    //Get min frame duration for this streams configuration
640    deriveMinFrameDuration();
641
642    pthread_mutex_unlock(&mMutex);
643    return rc;
644}
645
646/*===========================================================================
647 * FUNCTION   : validateCaptureRequest
648 *
649 * DESCRIPTION: validate a capture request from camera service
650 *
651 * PARAMETERS :
652 *   @request : request from framework to process
653 *
654 * RETURN     :
655 *
656 *==========================================================================*/
657int QCamera3HardwareInterface::validateCaptureRequest(
658                    camera3_capture_request_t *request)
659{
660    ssize_t idx = 0;
661    const camera3_stream_buffer_t *b;
662    CameraMetadata meta;
663
664    /* Sanity check the request */
665    if (request == NULL) {
666        ALOGE("%s: NULL capture request", __func__);
667        return BAD_VALUE;
668    }
669
670    uint32_t frameNumber = request->frame_number;
671    if (request->input_buffer != NULL &&
672            request->input_buffer->stream != mInputStream) {
673        ALOGE("%s: Request %d: Input buffer not from input stream!",
674                __FUNCTION__, frameNumber);
675        return BAD_VALUE;
676    }
677    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
678        ALOGE("%s: Request %d: No output buffers provided!",
679                __FUNCTION__, frameNumber);
680        return BAD_VALUE;
681    }
682    if (request->input_buffer != NULL) {
683        b = request->input_buffer;
684        QCamera3Channel *channel =
685            static_cast<QCamera3Channel*>(b->stream->priv);
686        if (channel == NULL) {
687            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
688                    __func__, frameNumber, idx);
689            return BAD_VALUE;
690        }
691        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
692            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
693                    __func__, frameNumber, idx);
694            return BAD_VALUE;
695        }
696        if (b->release_fence != -1) {
697            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
698                    __func__, frameNumber, idx);
699            return BAD_VALUE;
700        }
701        if (b->buffer == NULL) {
702            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
703                    __func__, frameNumber, idx);
704            return BAD_VALUE;
705        }
706    }
707
708    // Validate all buffers
709    b = request->output_buffers;
710    do {
711        QCamera3Channel *channel =
712                static_cast<QCamera3Channel*>(b->stream->priv);
713        if (channel == NULL) {
714            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
715                    __func__, frameNumber, idx);
716            return BAD_VALUE;
717        }
718        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
719            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
720                    __func__, frameNumber, idx);
721            return BAD_VALUE;
722        }
723        if (b->release_fence != -1) {
724            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
725                    __func__, frameNumber, idx);
726            return BAD_VALUE;
727        }
728        if (b->buffer == NULL) {
729            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
730                    __func__, frameNumber, idx);
731            return BAD_VALUE;
732        }
733        idx++;
734        b = request->output_buffers + idx;
735    } while (idx < (ssize_t)request->num_output_buffers);
736
737    return NO_ERROR;
738}
739
740/*===========================================================================
741 * FUNCTION   : deriveMinFrameDuration
742 *
743 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
744 *              on currently configured streams.
745 *
746 * PARAMETERS : NONE
747 *
748 * RETURN     : NONE
749 *
750 *==========================================================================*/
751void QCamera3HardwareInterface::deriveMinFrameDuration()
752{
753    int32_t maxJpegDimension, maxProcessedDimension;
754
755    maxJpegDimension = 0;
756    maxProcessedDimension = 0;
757
758    // Figure out maximum jpeg, processed, and raw dimensions
759    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
760        it != mStreamInfo.end(); it++) {
761
762        // Input stream doesn't have valid stream_type
763        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
764            continue;
765
        // "Dimension" here means pixel count (width * height).
766        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
767        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
768            if (dimension > maxJpegDimension)
769                maxJpegDimension = dimension;
770        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
771            if (dimension > maxProcessedDimension)
772                maxProcessedDimension = dimension;
773        }
774    }
775
776    //Assume all jpeg dimensions are in processed dimensions.
777    if (maxJpegDimension > maxProcessedDimension)
778        maxProcessedDimension = maxJpegDimension;
779
780    //Find minimum durations for processed, jpeg, and raw
781    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
782    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
783        if (maxProcessedDimension ==
784            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
785            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
            // NOTE(review): both the processed and jpeg minimum durations are
            // taken from jpeg_min_duration[] — presumably the capability table
            // has no separate processed-duration array here; confirm intent.
            // Also note: if no picture_sizes_tbl entry matches exactly, both
            // members keep their previous values.
786            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
787            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
788            break;
789        }
790    }
791}
792
793/*===========================================================================
794 * FUNCTION   : getMinFrameDuration
795 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
802 *
803 *==========================================================================*/
804int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
805{
806    bool hasJpegStream = false;
807    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
808        const camera3_stream_t *stream = request->output_buffers[i].stream;
809        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
810            hasJpegStream = true;
811    }
812
813    if (!hasJpegStream)
814        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
815    else
816        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
817}
818
819/*===========================================================================
820 * FUNCTION   : registerStreamBuffers
821 *
822 * DESCRIPTION: Register buffers for a given stream with the HAL device.
823 *
824 * PARAMETERS :
825 *   @stream_list : streams to be configured
826 *
827 * RETURN     :
828 *
829 *==========================================================================*/
830int QCamera3HardwareInterface::registerStreamBuffers(
831        const camera3_stream_buffer_set_t *buffer_set)
832{
833    int rc = 0;
834
835    pthread_mutex_lock(&mMutex);
836
837    if (buffer_set == NULL) {
838        ALOGE("%s: Invalid buffer_set parameter.", __func__);
839        pthread_mutex_unlock(&mMutex);
840        return -EINVAL;
841    }
842    if (buffer_set->stream == NULL) {
843        ALOGE("%s: Invalid stream parameter.", __func__);
844        pthread_mutex_unlock(&mMutex);
845        return -EINVAL;
846    }
847    if (buffer_set->num_buffers < 1) {
848        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
849        pthread_mutex_unlock(&mMutex);
850        return -EINVAL;
851    }
852    if (buffer_set->buffers == NULL) {
853        ALOGE("%s: Invalid buffers parameter.", __func__);
854        pthread_mutex_unlock(&mMutex);
855        return -EINVAL;
856    }
857
858    camera3_stream_t *stream = buffer_set->stream;
859    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
860
861    //set the buffer_set in the mStreamInfo array
862    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
863            it != mStreamInfo.end(); it++) {
864        if ((*it)->stream == stream) {
865            uint32_t numBuffers = buffer_set->num_buffers;
866            (*it)->buffer_set.stream = buffer_set->stream;
867            (*it)->buffer_set.num_buffers = numBuffers;
868            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
869            if ((*it)->buffer_set.buffers == NULL) {
870                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
871                pthread_mutex_unlock(&mMutex);
872                return -ENOMEM;
873            }
874            for (size_t j = 0; j < numBuffers; j++){
875                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
876            }
877            (*it)->registered = 1;
878        }
879    }
880    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
881    if (rc < 0) {
882        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
883        pthread_mutex_unlock(&mMutex);
884        return -ENODEV;
885    }
886
887    pthread_mutex_unlock(&mMutex);
888    return NO_ERROR;
889}
890
891/*===========================================================================
892 * FUNCTION   : processCaptureRequest
893 *
894 * DESCRIPTION: process a capture request from camera service
895 *
896 * PARAMETERS :
897 *   @request : request from framework to process
898 *
899 * RETURN     :
900 *
901 *==========================================================================*/
902int QCamera3HardwareInterface::processCaptureRequest(
903                    camera3_capture_request_t *request)
904{
905    int rc = NO_ERROR;
906    int32_t request_id;
907    CameraMetadata meta;
908
909    pthread_mutex_lock(&mMutex);
910
911    // For first capture request, stream on all streams
912    if (mFirstRequest) {
913        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
914            it != mStreamInfo.end(); it++) {
915            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
916            channel->start();
917        }
918    }
919
920    rc = validateCaptureRequest(request);
921    if (rc != NO_ERROR) {
922        ALOGE("%s: incoming request is not valid", __func__);
923        pthread_mutex_unlock(&mMutex);
924        return rc;
925    }
926
927    uint32_t frameNumber = request->frame_number;
928    uint32_t streamTypeMask = 0;
929
930    meta = request->settings;
931    if (meta.exists(ANDROID_REQUEST_ID)) {
932        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
933        mCurrentRequestId = request_id;
934        ALOGV("%s: Received request with id: %d",__func__, request_id);
935    } else if (mFirstRequest || mCurrentRequestId == -1){
936        ALOGE("%s: Unable to find request id field, \
937                & no previous id available", __func__);
938        return NAME_NOT_FOUND;
939    } else {
940        ALOGV("%s: Re-using old request id", __func__);
941        request_id = mCurrentRequestId;
942    }
943
944    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
945                                    __func__, __LINE__,
946                                    request->num_output_buffers,
947                                    request->input_buffer,
948                                    frameNumber);
949    // Acquire all request buffers first
950    int blob_request = 0;
951    for (size_t i = 0; i < request->num_output_buffers; i++) {
952        const camera3_stream_buffer_t& output = request->output_buffers[i];
953        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
954        sp<Fence> acquireFence = new Fence(output.acquire_fence);
955
956        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
957        //Call function to store local copy of jpeg data for encode params.
958            blob_request = 1;
959            rc = getJpegSettings(request->settings);
960            if (rc < 0) {
961                ALOGE("%s: failed to get jpeg parameters", __func__);
962                pthread_mutex_unlock(&mMutex);
963                return rc;
964            }
965        }
966
967        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
968        if (rc != OK) {
969            ALOGE("%s: fence wait failed %d", __func__, rc);
970            pthread_mutex_unlock(&mMutex);
971            return rc;
972        }
973        streamTypeMask |= channel->getStreamTypeMask();
974    }
975
976    rc = setFrameParameters(request, streamTypeMask);
977    if (rc < 0) {
978        ALOGE("%s: fail to set frame parameters", __func__);
979        pthread_mutex_unlock(&mMutex);
980        return rc;
981    }
982
983    /* Update pending request list and pending buffers map */
984    PendingRequestInfo pendingRequest;
985    pendingRequest.frame_number = frameNumber;
986    pendingRequest.num_buffers = request->num_output_buffers;
987    pendingRequest.request_id = request_id;
988    pendingRequest.blob_request = blob_request;
989
990    for (size_t i = 0; i < request->num_output_buffers; i++) {
991        RequestedBufferInfo requestedBuf;
992        requestedBuf.stream = request->output_buffers[i].stream;
993        requestedBuf.buffer = NULL;
994        pendingRequest.buffers.push_back(requestedBuf);
995
996        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
997    }
998    mPendingRequestsList.push_back(pendingRequest);
999
1000    // Notify metadata channel we receive a request
1001    mMetadataChannel->request(NULL, frameNumber);
1002
1003    // Call request on other streams
1004    for (size_t i = 0; i < request->num_output_buffers; i++) {
1005        const camera3_stream_buffer_t& output = request->output_buffers[i];
1006        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1007        mm_camera_buf_def_t *pInputBuffer = NULL;
1008
1009        if (channel == NULL) {
1010            ALOGE("%s: invalid channel pointer for stream", __func__);
1011            continue;
1012        }
1013
1014        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1015            QCamera3RegularChannel* inputChannel = NULL;
1016            if(request->input_buffer != NULL){
1017
1018                //Try to get the internal format
1019                inputChannel = (QCamera3RegularChannel*)
1020                    request->input_buffer->stream->priv;
1021                if(inputChannel == NULL ){
1022                    ALOGE("%s: failed to get input channel handle", __func__);
1023                } else {
1024                    pInputBuffer =
1025                        inputChannel->getInternalFormatBuffer(
1026                                request->input_buffer->buffer);
1027                    ALOGD("%s: Input buffer dump",__func__);
1028                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1029                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1030                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1031                }
1032            }
1033            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1034                            pInputBuffer,(QCamera3Channel*)inputChannel);
1035        } else {
1036            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1037                __LINE__, output.buffer, frameNumber);
1038            rc = channel->request(output.buffer, frameNumber);
1039        }
1040        if (rc < 0)
1041            ALOGE("%s: request failed", __func__);
1042    }
1043
1044    mFirstRequest = false;
1045
1046    //Block on conditional variable
1047    mPendingRequest = 1;
1048    while (mPendingRequest == 1) {
1049        pthread_cond_wait(&mRequestCond, &mMutex);
1050    }
1051
1052    pthread_mutex_unlock(&mMutex);
1053    return rc;
1054}
1055
1056/*===========================================================================
1057 * FUNCTION   : getMetadataVendorTagOps
1058 *
1059 * DESCRIPTION:
1060 *
1061 * PARAMETERS :
1062 *
1063 *
1064 * RETURN     :
1065 *==========================================================================*/
1066void QCamera3HardwareInterface::getMetadataVendorTagOps(
1067                    vendor_tag_query_ops_t* /*ops*/)
1068{
1069    /* Enable locks when we eventually add Vendor Tags */
1070    /*
1071    pthread_mutex_lock(&mMutex);
1072
1073    pthread_mutex_unlock(&mMutex);
1074    */
1075    return;
1076}
1077
1078/*===========================================================================
1079 * FUNCTION   : dump
1080 *
1081 * DESCRIPTION:
1082 *
1083 * PARAMETERS :
1084 *
1085 *
1086 * RETURN     :
1087 *==========================================================================*/
1088void QCamera3HardwareInterface::dump(int /*fd*/)
1089{
1090    /*Enable lock when we implement this function*/
1091    /*
1092    pthread_mutex_lock(&mMutex);
1093
1094    pthread_mutex_unlock(&mMutex);
1095    */
1096    return;
1097}
1098
1099
1100/*===========================================================================
1101 * FUNCTION   : captureResultCb
1102 *
1103 * DESCRIPTION: Callback handler for all capture result
1104 *              (streams, as well as metadata)
1105 *
1106 * PARAMETERS :
1107 *   @metadata : metadata information
1108 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1109 *               NULL if metadata.
1110 *
1111 * RETURN     : NONE
1112 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // mMutex is held for the whole callback: it touches mPendingRequestsList,
    // mPendingBuffersMap, and the condition variable that unblocks
    // processCaptureRequest().
    pthread_mutex_lock(&mMutex);

    if (metadata_buf) {
        // Metadata path: extract the HAL metadata fields out of the first
        // buffer of the super-buf.
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
            CAM_INTF_META_PENDING_REQUESTS, metadata);
        // NOTE(review): this local shadows the frame_number parameter for the
        // rest of the metadata branch; the parameter is only meaningful on
        // the buffer path below.
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        // Convert the sensor timeval into nanoseconds for the framework.
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;

        if (!frame_number_valid) {
            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
            // Return the buffer to the metadata channel; nothing to report.
            // NOTE(review): metadata_buf itself is not free()d on this path,
            // unlike the valid-frame path below — confirm ownership semantics.
            mMetadataChannel->bufDone(metadata_buf);
            goto done_metadata;
        }
        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
                frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is metadata drop, how do we derive the timestamp of
            //dropped frames? For now, we fake the dropped timestamp by substracting
            //from the reported timestamp
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                // Dropped frame: synthesize a minimal result with only the
                // faked timestamp and the request id.
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time, i->request_id);
                if (i->blob_request && needReprocess()) {
                   //If it is a blob request then send the metadata to the picture channel
                   // NOTE(review): the picture channel appears to take over
                   // ownership of metadata_buf here (no free on this branch).
                   mPictureChannel->queueMetadata(metadata_buf);

                } else {
                   // Return metadata buffer
                   mMetadataChannel->bufDone(metadata_buf);
                   free(metadata_buf);
                }
            }
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            // Count stream buffers already cached for this request (filled in
            // by earlier buffer-path callbacks).
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                if (!result_buffers) {
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                // Move the cached buffers into the result array, freeing the
                // cached copies and decrementing the per-stream pending count.
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                // No buffers cached yet: deliver the metadata-only result;
                // buffers will follow via the buffer path.
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }


done_metadata:
        // Unblock processCaptureRequest() unless some stream already has
        // max_buffers outstanding or the backend still reports pending work.
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued && !pending_requests) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
            i++;
        }
        if (i == mPendingRequestsList.end()) {
            // Metadata already delivered for this frame: send the buffer now.
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            ALOGV("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            // Metadata for this frame hasn't arrived yet: cache a heap copy
            // of the stream buffer on the pending request entry; it will be
            // delivered (and freed) by the metadata path above.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                                __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
    pthread_mutex_unlock(&mMutex);
    return;
}
1295
1296/*===========================================================================
1297 * FUNCTION   : translateCbMetadataToResultMetadata
1298 *
1299 * DESCRIPTION:
1300 *
1301 * PARAMETERS :
1302 *   @metadata : metadata information from callback
1303 *
1304 * RETURN     : camera_metadata_t*
1305 *              metadata in a format specified by fwk
1306 *==========================================================================*/
1307camera_metadata_t*
1308QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1309                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1310                                 int32_t request_id)
1311{
1312    CameraMetadata camMetadata;
1313    camera_metadata_t* resultMetadata;
1314
1315    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1316    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1317
1318    /*CAM_INTF_META_HISTOGRAM - TODO*/
1319    /*cam_hist_stats_t  *histogram =
1320      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1321      metadata);*/
1322
1323    /*face detection*/
1324    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1325        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1326    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1327    int32_t faceIds[numFaces];
1328    uint8_t faceScores[numFaces];
1329    int32_t faceRectangles[numFaces * 4];
1330    int32_t faceLandmarks[numFaces * 6];
1331    int j = 0, k = 0;
1332    for (int i = 0; i < numFaces; i++) {
1333        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1334        faceScores[i] = faceDetectionInfo->faces[i].score;
1335        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1336                faceRectangles+j, -1);
1337        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1338        j+= 4;
1339        k+= 6;
1340    }
1341    if (numFaces > 0) {
1342        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1343        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1344        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1345            faceRectangles, numFaces*4);
1346        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1347            faceLandmarks, numFaces*6);
1348    }
1349
1350    uint8_t  *color_correct_mode =
1351        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1352    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1353
1354    int32_t  *ae_precapture_id =
1355        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1356    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1357
1358    /*aec regions*/
1359    cam_area_t  *hAeRegions =
1360        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1361    int32_t aeRegions[5];
1362    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1363    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1364    if(mIsZslMode) {
1365        uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
1366        camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
1367    } else {
1368        uint8_t *ae_state =
1369            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1370        camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1371    }
1372    uint8_t  *focusMode =
1373        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1374    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1375
1376    /*af regions*/
1377    cam_area_t  *hAfRegions =
1378        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1379    int32_t afRegions[5];
1380    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1381    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1382
1383    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1384    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1385
1386    int32_t  *afTriggerId =
1387        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1388    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1389
1390    uint8_t  *whiteBalance =
1391        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1392    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1393
1394    /*awb regions*/
1395    cam_area_t  *hAwbRegions =
1396        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1397    int32_t awbRegions[5];
1398    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1399    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1400
1401    uint8_t  *whiteBalanceState =
1402        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1403    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1404
1405    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1406    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1407
1408    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1409    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1410
1411    uint8_t  *flashPower =
1412        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1413    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1414
1415    int64_t  *flashFiringTime =
1416        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1417    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1418
1419    /*int32_t  *ledMode =
1420      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1421      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1422
1423    uint8_t  *flashState =
1424        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1425    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1426
1427    uint8_t  *hotPixelMode =
1428        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1429    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1430
1431    float  *lensAperture =
1432        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1433    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1434
1435    float  *filterDensity =
1436        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1437    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1438
1439    float  *focalLength =
1440        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1441    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1442
1443    float  *focusDistance =
1444        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1445    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1446
1447    float  *focusRange =
1448        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1449    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1450
1451    uint8_t  *opticalStab =
1452        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1453    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1454
1455    /*int32_t  *focusState =
1456      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1457      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1458
1459    uint8_t  *noiseRedMode =
1460        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1461    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1462
1463    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1464
1465    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1466        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1467    int32_t scalerCropRegion[4];
1468    scalerCropRegion[0] = hScalerCropRegion->left;
1469    scalerCropRegion[1] = hScalerCropRegion->top;
1470    scalerCropRegion[2] = hScalerCropRegion->width;
1471    scalerCropRegion[3] = hScalerCropRegion->height;
1472    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1473
1474    int64_t  *sensorExpTime =
1475        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1476    mMetadataResponse.exposure_time = *sensorExpTime;
1477    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1478    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1479
1480    int64_t  *sensorFameDuration =
1481        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1482    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1483    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1484
1485    int32_t  *sensorSensitivity =
1486        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1487    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1488    mMetadataResponse.iso_speed = *sensorSensitivity;
1489    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1490
1491    uint8_t  *shadingMode =
1492        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1493    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1494
1495    uint8_t  *faceDetectMode =
1496        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1497    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1498        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1499        *faceDetectMode);
1500    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1501
1502    uint8_t  *histogramMode =
1503        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1504    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1505
1506    uint8_t  *sharpnessMapMode =
1507        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1508    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1509            sharpnessMapMode, 1);
1510
1511    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1512    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1513        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1514    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1515            (int32_t*)sharpnessMap->sharpness,
1516            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1517
1518    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1519        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1520    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1521    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1522    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1523                       (float*)lensShadingMap->lens_shading,
1524                       4*map_width*map_height);
1525
1526    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1527        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1528    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1529
1530    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1531        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1532    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1533                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1534
1535    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1536        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1537    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1538                       predColorCorrectionGains->gains, 4);
1539
1540    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1541        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1542    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1543                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1544
1545    uint8_t *blackLevelLock = (uint8_t*)
1546        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1547    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1548
1549    uint8_t *sceneFlicker = (uint8_t*)
1550        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1551    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1552
1553
1554    resultMetadata = camMetadata.release();
1555    return resultMetadata;
1556}
1557
1558/*===========================================================================
1559 * FUNCTION   : convertToRegions
1560 *
1561 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1562 *
1563 * PARAMETERS :
1564 *   @rect   : cam_rect_t struct to convert
1565 *   @region : int32_t destination array
1566 *   @weight : if we are converting from cam_area_t, weight is valid
1567 *             else weight = -1
1568 *
1569 *==========================================================================*/
1570void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1571    region[0] = rect.left;
1572    region[1] = rect.top;
1573    region[2] = rect.left + rect.width;
1574    region[3] = rect.top + rect.height;
1575    if (weight > -1) {
1576        region[4] = weight;
1577    }
1578}
1579
1580/*===========================================================================
1581 * FUNCTION   : convertFromRegions
1582 *
 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination struct
 *   @settings : frame settings holding the region tag to read
 *   @tag      : metadata tag of the region array
 *               (layout: x_min, y_min, x_max, y_max, weight)
1590 *
1591 *==========================================================================*/
1592void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1593                                                   const camera_metadata_t *settings,
1594                                                   uint32_t tag){
1595    CameraMetadata frame_settings;
1596    frame_settings = settings;
1597    int32_t x_min = frame_settings.find(tag).data.i32[0];
1598    int32_t y_min = frame_settings.find(tag).data.i32[1];
1599    int32_t x_max = frame_settings.find(tag).data.i32[2];
1600    int32_t y_max = frame_settings.find(tag).data.i32[3];
1601    roi->weight = frame_settings.find(tag).data.i32[4];
1602    roi->rect.left = x_min;
1603    roi->rect.top = y_min;
1604    roi->rect.width = x_max - x_min;
1605    roi->rect.height = y_max - y_min;
1606}
1607
1608/*===========================================================================
1609 * FUNCTION   : resetIfNeededROI
1610 *
1611 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1612 *              crop region
1613 *
1614 * PARAMETERS :
1615 *   @roi       : cam_area_t struct to resize
1616 *   @scalerCropRegion : cam_crop_region_t region to compare against
1617 *
1618 *
1619 *==========================================================================*/
1620bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1621                                                 const cam_crop_region_t* scalerCropRegion)
1622{
1623    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1624    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1625    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1626    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1627    if ((roi_x_max < scalerCropRegion->left) ||
1628        (roi_y_max < scalerCropRegion->top)  ||
1629        (roi->rect.left > crop_x_max) ||
1630        (roi->rect.top > crop_y_max)){
1631        return false;
1632    }
1633    if (roi->rect.left < scalerCropRegion->left) {
1634        roi->rect.left = scalerCropRegion->left;
1635    }
1636    if (roi->rect.top < scalerCropRegion->top) {
1637        roi->rect.top = scalerCropRegion->top;
1638    }
1639    if (roi_x_max > crop_x_max) {
1640        roi_x_max = crop_x_max;
1641    }
1642    if (roi_y_max > crop_y_max) {
1643        roi_y_max = crop_y_max;
1644    }
1645    roi->rect.width = roi_x_max - roi->rect.left;
1646    roi->rect.height = roi_y_max - roi->rect.top;
1647    return true;
1648}
1649
1650/*===========================================================================
1651 * FUNCTION   : convertLandmarks
1652 *
1653 * DESCRIPTION: helper method to extract the landmarks from face detection info
1654 *
1655 * PARAMETERS :
 *   @face      : cam_face_detection_info_t struct to read landmarks from
 *   @landmarks : int32_t destination array (eye centers and mouth center)
1658 *
1659 *
1660 *==========================================================================*/
1661void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1662{
1663    landmarks[0] = face.left_eye_center.x;
1664    landmarks[1] = face.left_eye_center.y;
1665    landmarks[2] = face.right_eye_center.y;
1666    landmarks[3] = face.right_eye_center.y;
1667    landmarks[4] = face.mouth_center.x;
1668    landmarks[5] = face.mouth_center.y;
1669}
1670
1671#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1672/*===========================================================================
1673 * FUNCTION   : initCapabilities
1674 *
1675 * DESCRIPTION: initialize camera capabilities in static data struct
1676 *
1677 * PARAMETERS :
1678 *   @cameraId  : camera Id
1679 *
1680 * RETURN     : int32_t type of status
1681 *              NO_ERROR  -- success
1682 *              none-zero failure code
1683 *==========================================================================*/
1684int QCamera3HardwareInterface::initCapabilities(int cameraId)
1685{
1686    int rc = 0;
1687    mm_camera_vtbl_t *cameraHandle = NULL;
1688    QCamera3HeapMemory *capabilityHeap = NULL;
1689
1690    cameraHandle = camera_open(cameraId);
1691    if (!cameraHandle) {
1692        ALOGE("%s: camera_open failed", __func__);
1693        rc = -1;
1694        goto open_failed;
1695    }
1696
1697    capabilityHeap = new QCamera3HeapMemory();
1698    if (capabilityHeap == NULL) {
1699        ALOGE("%s: creation of capabilityHeap failed", __func__);
1700        goto heap_creation_failed;
1701    }
1702    /* Allocate memory for capability buffer */
1703    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1704    if(rc != OK) {
1705        ALOGE("%s: No memory for cappability", __func__);
1706        goto allocate_failed;
1707    }
1708
1709    /* Map memory for capability buffer */
1710    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1711    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1712                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1713                                capabilityHeap->getFd(0),
1714                                sizeof(cam_capability_t));
1715    if(rc < 0) {
1716        ALOGE("%s: failed to map capability buffer", __func__);
1717        goto map_failed;
1718    }
1719
1720    /* Query Capability */
1721    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1722    if(rc < 0) {
1723        ALOGE("%s: failed to query capability",__func__);
1724        goto query_failed;
1725    }
1726    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1727    if (!gCamCapability[cameraId]) {
1728        ALOGE("%s: out of memory", __func__);
1729        goto query_failed;
1730    }
1731    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1732                                        sizeof(cam_capability_t));
1733    rc = 0;
1734
1735query_failed:
1736    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1737                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1738map_failed:
1739    capabilityHeap->deallocate();
1740allocate_failed:
1741    delete capabilityHeap;
1742heap_creation_failed:
1743    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1744    cameraHandle = NULL;
1745open_failed:
1746    return rc;
1747}
1748
1749/*===========================================================================
1750 * FUNCTION   : initParameters
1751 *
1752 * DESCRIPTION: initialize camera parameters
1753 *
1754 * PARAMETERS :
1755 *
1756 * RETURN     : int32_t type of status
1757 *              NO_ERROR  -- success
1758 *              none-zero failure code
1759 *==========================================================================*/
1760int QCamera3HardwareInterface::initParameters()
1761{
1762    int rc = 0;
1763
1764    //Allocate Set Param Buffer
1765    mParamHeap = new QCamera3HeapMemory();
1766    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1767    if(rc != OK) {
1768        rc = NO_MEMORY;
1769        ALOGE("Failed to allocate SETPARM Heap memory");
1770        delete mParamHeap;
1771        mParamHeap = NULL;
1772        return rc;
1773    }
1774
1775    //Map memory for parameters buffer
1776    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1777            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1778            mParamHeap->getFd(0),
1779            sizeof(parm_buffer_t));
1780    if(rc < 0) {
1781        ALOGE("%s:failed to map SETPARM buffer",__func__);
1782        rc = FAILED_TRANSACTION;
1783        mParamHeap->deallocate();
1784        delete mParamHeap;
1785        mParamHeap = NULL;
1786        return rc;
1787    }
1788
1789    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1790    return rc;
1791}
1792
1793/*===========================================================================
1794 * FUNCTION   : deinitParameters
1795 *
1796 * DESCRIPTION: de-initialize camera parameters
1797 *
1798 * PARAMETERS :
1799 *
1800 * RETURN     : NONE
1801 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the daemon before freeing it locally;
    // teardown mirrors initParameters() in reverse order.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap just freed; clear the dangling pointer.
    mParameters = NULL;
}
1813
1814/*===========================================================================
1815 * FUNCTION   : calcMaxJpegSize
1816 *
1817 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1818 *
1819 * PARAMETERS :
1820 *
1821 * RETURN     : max_jpeg_size
1822 *==========================================================================*/
1823int QCamera3HardwareInterface::calcMaxJpegSize()
1824{
1825    int32_t max_jpeg_size = 0;
1826    int temp_width, temp_height;
1827    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1828        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1829        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1830        if (temp_width * temp_height > max_jpeg_size ) {
1831            max_jpeg_size = temp_width * temp_height;
1832        }
1833    }
1834    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1835    return max_jpeg_size;
1836}
1837
1838/*===========================================================================
1839 * FUNCTION   : initStaticMetadata
1840 *
1841 * DESCRIPTION: initialize the static metadata
1842 *
1843 * PARAMETERS :
1844 *   @cameraId  : camera Id
1845 *
1846 * RETURN     : int32_t type of status
1847 *              0  -- success
1848 *              non-zero failure code
1849 *==========================================================================*/
1850int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1851{
1852    int rc = 0;
1853    CameraMetadata staticInfo;
1854
1855    /* android.info: hardware level */
1856    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
1857    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1858        &supportedHardwareLevel, 1);
1859
1860    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1861    /*HAL 3 only*/
1862    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1863                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1864
1865    /*hard coded for now but this should come from sensor*/
1866    float min_focus_distance;
1867    if(facingBack){
1868        min_focus_distance = 10;
1869    } else {
1870        min_focus_distance = 0;
1871    }
1872    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1873                    &min_focus_distance, 1);
1874
1875    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1876                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1877
1878    /*should be using focal lengths but sensor doesn't provide that info now*/
1879    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1880                      &gCamCapability[cameraId]->focal_length,
1881                      1);
1882
1883    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1884                      gCamCapability[cameraId]->apertures,
1885                      gCamCapability[cameraId]->apertures_count);
1886
1887    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1888                gCamCapability[cameraId]->filter_densities,
1889                gCamCapability[cameraId]->filter_densities_count);
1890
1891
1892    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1893                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1894                      gCamCapability[cameraId]->optical_stab_modes_count);
1895
1896    staticInfo.update(ANDROID_LENS_POSITION,
1897                      gCamCapability[cameraId]->lens_position,
1898                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1899
1900    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1901                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1902    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1903                      lens_shading_map_size,
1904                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1905
1906    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1907                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1908    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1909            geo_correction_map_size,
1910            sizeof(geo_correction_map_size)/sizeof(int32_t));
1911
1912    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1913                       gCamCapability[cameraId]->geo_correction_map,
1914                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1915
1916    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1917            gCamCapability[cameraId]->sensor_physical_size, 2);
1918
1919    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1920            gCamCapability[cameraId]->exposure_time_range, 2);
1921
1922    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1923            &gCamCapability[cameraId]->max_frame_duration, 1);
1924
1925
1926    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1927                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1928
1929    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1930                                               gCamCapability[cameraId]->pixel_array_size.height};
1931    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1932                      pixel_array_size, 2);
1933
1934    int32_t active_array_size[] = {0, 0,
1935                                                gCamCapability[cameraId]->active_array_size.width,
1936                                                gCamCapability[cameraId]->active_array_size.height};
1937    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1938                      active_array_size, 4);
1939
1940    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1941            &gCamCapability[cameraId]->white_level, 1);
1942
1943    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1944            gCamCapability[cameraId]->black_level_pattern, 4);
1945
1946    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1947                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1948
1949    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1950                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1951
1952    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1953                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
1954
1955    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1956                      &gCamCapability[cameraId]->histogram_size, 1);
1957
1958    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1959            &gCamCapability[cameraId]->max_histogram_count, 1);
1960
1961    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1962                                                gCamCapability[cameraId]->sharpness_map_size.height};
1963
1964    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1965            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1966
1967    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1968            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1969
1970
1971    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1972                      &gCamCapability[cameraId]->raw_min_duration,
1973                       1);
1974
1975    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
1976                                                HAL_PIXEL_FORMAT_BLOB};
1977    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
1978    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1979                      scalar_formats,
1980                      scalar_formats_count);
1981
1982    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1983    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1984              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1985              available_processed_sizes);
1986    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1987                available_processed_sizes,
1988                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
1989
1990    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1991                      &gCamCapability[cameraId]->jpeg_min_duration[0],
1992                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
1993
1994    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1995    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1996                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1997                 available_fps_ranges);
1998    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1999            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2000
2001    camera_metadata_rational exposureCompensationStep = {
2002            gCamCapability[cameraId]->exp_compensation_step.numerator,
2003            gCamCapability[cameraId]->exp_compensation_step.denominator};
2004    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2005                      &exposureCompensationStep, 1);
2006
2007    /*TO DO*/
2008    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2009    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2010                      availableVstabModes, sizeof(availableVstabModes));
2011
2012    /*HAL 1 and HAL 3 common*/
2013    float maxZoom = 4;
2014    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2015            &maxZoom, 1);
2016
2017    int32_t max3aRegions = 1;
2018    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2019            &max3aRegions, 1);
2020
2021    uint8_t availableFaceDetectModes[] = {
2022            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2023            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2024    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2025                      availableFaceDetectModes,
2026                      sizeof(availableFaceDetectModes));
2027
2028    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2029                                       gCamCapability[cameraId]->raw_dim.height};
2030    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2031                      raw_size,
2032                      sizeof(raw_size)/sizeof(uint32_t));
2033
2034    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2035                                                        gCamCapability[cameraId]->exposure_compensation_max};
2036    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2037            exposureCompensationRange,
2038            sizeof(exposureCompensationRange)/sizeof(int32_t));
2039
2040    uint8_t lensFacing = (facingBack) ?
2041            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2042    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2043
2044    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2045                available_processed_sizes,
2046                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2047
2048    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2049                      available_thumbnail_sizes,
2050                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2051
2052    int32_t max_jpeg_size = 0;
2053    int temp_width, temp_height;
2054    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2055        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2056        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2057        if (temp_width * temp_height > max_jpeg_size ) {
2058            max_jpeg_size = temp_width * temp_height;
2059        }
2060    }
2061    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2062    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2063                      &max_jpeg_size, 1);
2064
2065    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2066    int32_t size = 0;
2067    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2068        int val = lookupFwkName(EFFECT_MODES_MAP,
2069                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2070                                   gCamCapability[cameraId]->supported_effects[i]);
2071        if (val != NAME_NOT_FOUND) {
2072            avail_effects[size] = (uint8_t)val;
2073            size++;
2074        }
2075    }
2076    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2077                      avail_effects,
2078                      size);
2079
2080    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2081    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2082    int32_t supported_scene_modes_cnt = 0;
2083    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2084        int val = lookupFwkName(SCENE_MODES_MAP,
2085                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2086                                gCamCapability[cameraId]->supported_scene_modes[i]);
2087        if (val != NAME_NOT_FOUND) {
2088            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2089            supported_indexes[supported_scene_modes_cnt] = i;
2090            supported_scene_modes_cnt++;
2091        }
2092    }
2093
2094    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2095                      avail_scene_modes,
2096                      supported_scene_modes_cnt);
2097
2098    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2099    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2100                      supported_scene_modes_cnt,
2101                      scene_mode_overrides,
2102                      supported_indexes,
2103                      cameraId);
2104    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2105                      scene_mode_overrides,
2106                      supported_scene_modes_cnt*3);
2107
2108    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2109    size = 0;
2110    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2111        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2112                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2113                                 gCamCapability[cameraId]->supported_antibandings[i]);
2114        if (val != NAME_NOT_FOUND) {
2115            avail_antibanding_modes[size] = (uint8_t)val;
2116            size++;
2117        }
2118
2119    }
2120    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2121                      avail_antibanding_modes,
2122                      size);
2123
2124    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2125    size = 0;
2126    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2127        int val = lookupFwkName(FOCUS_MODES_MAP,
2128                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2129                                gCamCapability[cameraId]->supported_focus_modes[i]);
2130        if (val != NAME_NOT_FOUND) {
2131            avail_af_modes[size] = (uint8_t)val;
2132            size++;
2133        }
2134    }
2135    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2136                      avail_af_modes,
2137                      size);
2138
2139    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2140    size = 0;
2141    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2142        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2143                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2144                                    gCamCapability[cameraId]->supported_white_balances[i]);
2145        if (val != NAME_NOT_FOUND) {
2146            avail_awb_modes[size] = (uint8_t)val;
2147            size++;
2148        }
2149    }
2150    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2151                      avail_awb_modes,
2152                      size);
2153
2154    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2155    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2156      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2157
2158    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2159            available_flash_levels,
2160            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2161
2162
2163    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2164    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2165            &flashAvailable, 1);
2166
2167    uint8_t avail_ae_modes[5];
2168    size = 0;
2169    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2170        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2171        size++;
2172    }
2173    if (flashAvailable) {
2174        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2175        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2176        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2177    }
2178    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2179                      avail_ae_modes,
2180                      size);
2181
2182    int32_t sensitivity_range[2];
2183    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2184    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2185    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2186                      sensitivity_range,
2187                      sizeof(sensitivity_range) / sizeof(int32_t));
2188
2189    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2190                      &gCamCapability[cameraId]->max_analog_sensitivity,
2191                      1);
2192
2193    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2194                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2195                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2196
2197    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2198    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2199                      &sensor_orientation,
2200                      1);
2201
2202    int32_t max_output_streams[3] = {1, 3, 1};
2203    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2204                      max_output_streams,
2205                      3);
2206
2207    gStaticMetadata[cameraId] = staticInfo.release();
2208    return rc;
2209}
2210
2211/*===========================================================================
2212 * FUNCTION   : makeTable
2213 *
2214 * DESCRIPTION: make a table of sizes
2215 *
2216 * PARAMETERS :
2217 *
2218 *
2219 *==========================================================================*/
2220void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2221                                          int32_t* sizeTable)
2222{
2223    int j = 0;
2224    for (int i = 0; i < size; i++) {
2225        sizeTable[j] = dimTable[i].width;
2226        sizeTable[j+1] = dimTable[i].height;
2227        j+=2;
2228    }
2229}
2230
2231/*===========================================================================
2232 * FUNCTION   : makeFPSTable
2233 *
2234 * DESCRIPTION: make a table of fps ranges
2235 *
2236 * PARAMETERS :
2237 *
2238 *==========================================================================*/
2239void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2240                                          int32_t* fpsRangesTable)
2241{
2242    int j = 0;
2243    for (int i = 0; i < size; i++) {
2244        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2245        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2246        j+=2;
2247    }
2248}
2249
2250/*===========================================================================
2251 * FUNCTION   : makeOverridesList
2252 *
2253 * DESCRIPTION: make a list of scene mode overrides
2254 *
2255 * PARAMETERS :
2256 *
2257 *
2258 *==========================================================================*/
2259void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2260                                                  uint8_t size, uint8_t* overridesList,
2261                                                  uint8_t* supported_indexes,
2262                                                  int camera_id)
2263{
2264    /*daemon will give a list of overrides for all scene modes.
2265      However we should send the fwk only the overrides for the scene modes
2266      supported by the framework*/
2267    int j = 0, index = 0, supt = 0;
2268    uint8_t focus_override;
2269    for (int i = 0; i < size; i++) {
2270        supt = 0;
2271        index = supported_indexes[i];
2272        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2273        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2274                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2275                                                    overridesTable[index].awb_mode);
2276        focus_override = (uint8_t)overridesTable[index].af_mode;
2277        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2278           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2279              supt = 1;
2280              break;
2281           }
2282        }
2283        if (supt) {
2284           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2285                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2286                                              focus_override);
2287        } else {
2288           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2289        }
2290        j+=3;
2291    }
2292}
2293
2294/*===========================================================================
2295 * FUNCTION   : getPreviewHalPixelFormat
2296 *
2297 * DESCRIPTION: convert the format to type recognized by framework
2298 *
2299 * PARAMETERS : format : the format from backend
2300 *
2301 ** RETURN    : format recognized by framework
2302 *
2303 *==========================================================================*/
2304int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2305{
2306    int32_t halPixelFormat;
2307
2308    switch (format) {
2309    case CAM_FORMAT_YUV_420_NV12:
2310        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2311        break;
2312    case CAM_FORMAT_YUV_420_NV21:
2313        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2314        break;
2315    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2316        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2317        break;
2318    case CAM_FORMAT_YUV_420_YV12:
2319        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2320        break;
2321    case CAM_FORMAT_YUV_422_NV16:
2322    case CAM_FORMAT_YUV_422_NV61:
2323    default:
2324        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2325        break;
2326    }
2327    return halPixelFormat;
2328}
2329
2330/*===========================================================================
2331 * FUNCTION   : getSensorSensitivity
2332 *
2333 * DESCRIPTION: convert iso_mode to an integer value
2334 *
2335 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2336 *
2337 ** RETURN    : sensitivity supported by sensor
2338 *
2339 *==========================================================================*/
2340int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2341{
2342    int32_t sensitivity;
2343
2344    switch (iso_mode) {
2345    case CAM_ISO_MODE_100:
2346        sensitivity = 100;
2347        break;
2348    case CAM_ISO_MODE_200:
2349        sensitivity = 200;
2350        break;
2351    case CAM_ISO_MODE_400:
2352        sensitivity = 400;
2353        break;
2354    case CAM_ISO_MODE_800:
2355        sensitivity = 800;
2356        break;
2357    case CAM_ISO_MODE_1600:
2358        sensitivity = 1600;
2359        break;
2360    default:
2361        sensitivity = -1;
2362        break;
2363    }
2364    return sensitivity;
2365}
2366
2367
2368/*===========================================================================
2369 * FUNCTION   : AddSetParmEntryToBatch
2370 *
2371 * DESCRIPTION: add set parameter entry into batch
2372 *
2373 * PARAMETERS :
2374 *   @p_table     : ptr to parameter buffer
2375 *   @paramType   : parameter type
2376 *   @paramLength : length of parameter value
2377 *   @paramValue  : ptr to parameter value
2378 *
2379 * RETURN     : int32_t type of status
2380 *              NO_ERROR  -- success
2381 *              none-zero failure code
2382 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    // The batch threads a singly-linked list of flagged entries through the
    // parameter table, ordered by parameter id (see the GET_*/SET_*_PARAM_ID
    // macros). 'position' is the slot this parameter occupies in the table.
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
    } else if (position < current){
        // New entry sorts before the current head: make it the new head.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        // Walk forward until the successor of 'current' is >= position.
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            // Splice 'position' between 'current' and its old successor.
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Each table slot is a fixed-size union; reject values that would
    // overflow it.
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2424
2425/*===========================================================================
2426 * FUNCTION   : lookupFwkName
2427 *
2428 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
2430 *
2431 * PARAMETERS  :
2432 *   @arr      : map between the two enums
2433 *   @len      : len of the map
2434 *   @hal_name : name of the hal_parm to map
2435 *
2436 * RETURN     : int type of status
2437 *              fwk_name  -- success
2438 *              none-zero failure code
2439 *==========================================================================*/
2440int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2441                                             int len, int hal_name)
2442{
2443
2444    for (int i = 0; i < len; i++) {
2445        if (arr[i].hal_name == hal_name)
2446            return arr[i].fwk_name;
2447    }
2448
2449    /* Not able to find matching framework type is not necessarily
2450     * an error case. This happens when mm-camera supports more attributes
2451     * than the frameworks do */
2452    ALOGD("%s: Cannot find matching framework type", __func__);
2453    return NAME_NOT_FOUND;
2454}
2455
2456/*===========================================================================
2457 * FUNCTION   : lookupHalName
2458 *
2459 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
2461 *
2462 * PARAMETERS  :
2463 *   @arr      : map between the two enums
2464 *   @len      : len of the map
2465 *   @fwk_name : name of the hal_parm to map
2466 *
2467 * RETURN     : int32_t type of status
2468 *              hal_name  -- success
2469 *              none-zero failure code
2470 *==========================================================================*/
2471int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2472                                             int len, int fwk_name)
2473{
2474    for (int i = 0; i < len; i++) {
2475       if (arr[i].fwk_name == fwk_name)
2476           return arr[i].hal_name;
2477    }
2478    ALOGE("%s: Cannot find matching hal type", __func__);
2479    return NAME_NOT_FOUND;
2480}
2481
2482/*===========================================================================
2483 * FUNCTION   : getCapabilities
2484 *
2485 * DESCRIPTION: query camera capabilities
2486 *
2487 * PARAMETERS :
2488 *   @cameraId  : camera Id
2489 *   @info      : camera info struct to be filled in with camera capabilities
2490 *
2491 * RETURN     : int32_t type of status
2492 *              NO_ERROR  -- success
2493 *              none-zero failure code
2494 *==========================================================================*/
2495int QCamera3HardwareInterface::getCamInfo(int cameraId,
2496                                    struct camera_info *info)
2497{
2498    int rc = 0;
2499
2500    if (NULL == gCamCapability[cameraId]) {
2501        rc = initCapabilities(cameraId);
2502        if (rc < 0) {
2503            //pthread_mutex_unlock(&g_camlock);
2504            return rc;
2505        }
2506    }
2507
2508    if (NULL == gStaticMetadata[cameraId]) {
2509        rc = initStaticMetadata(cameraId);
2510        if (rc < 0) {
2511            return rc;
2512        }
2513    }
2514
2515    switch(gCamCapability[cameraId]->position) {
2516    case CAM_POSITION_BACK:
2517        info->facing = CAMERA_FACING_BACK;
2518        break;
2519
2520    case CAM_POSITION_FRONT:
2521        info->facing = CAMERA_FACING_FRONT;
2522        break;
2523
2524    default:
2525        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2526        rc = -1;
2527        break;
2528    }
2529
2530
2531    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2532    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2533    info->static_camera_characteristics = gStaticMetadata[cameraId];
2534
2535    return rc;
2536}
2537
2538/*===========================================================================
2539 * FUNCTION   : translateMetadata
2540 *
2541 * DESCRIPTION: translate the metadata into camera_metadata_t
2542 *
2543 * PARAMETERS : type of the request
2544 *
2545 *
2546 * RETURN     : success: camera_metadata_t*
2547 *              failure: NULL
2548 *
2549 *==========================================================================*/
2550camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2551{
2552    pthread_mutex_lock(&mMutex);
2553
2554    if (mDefaultMetadata[type] != NULL) {
2555        pthread_mutex_unlock(&mMutex);
2556        return mDefaultMetadata[type];
2557    }
2558    //first time we are handling this request
2559    //fill up the metadata structure using the wrapper class
2560    CameraMetadata settings;
2561    //translate from cam_capability_t to camera_metadata_tag_t
2562    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2563    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2564
2565    /*control*/
2566
2567    uint8_t controlIntent = 0;
2568    switch (type) {
2569      case CAMERA3_TEMPLATE_PREVIEW:
2570        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2571        break;
2572      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2573        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2574        break;
2575      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2576        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2577        break;
2578      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2579        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2580        break;
2581      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2582        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2583        break;
2584      default:
2585        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2586        break;
2587    }
2588    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2589
2590    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2591            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2592
2593    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2594    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2595
2596    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2597    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2598
2599    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2600    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2601
2602    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2603    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2604
2605    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2606    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2607
2608    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2609    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2610
2611    static uint8_t focusMode;
2612    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2613        ALOGE("%s: Setting focus mode to auto", __func__);
2614        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2615    } else {
2616        ALOGE("%s: Setting focus mode to off", __func__);
2617        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2618    }
2619    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2620
2621    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2622    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2623
2624    /*flash*/
2625    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2626    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2627
2628    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2629    settings.update(ANDROID_FLASH_FIRING_POWER,
2630            &flashFiringLevel, 1);
2631
2632    /* lens */
2633    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2634    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2635
2636    if (gCamCapability[mCameraId]->filter_densities_count) {
2637        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2638        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2639                        gCamCapability[mCameraId]->filter_densities_count);
2640    }
2641
2642    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2643    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2644
2645    /* frame duration */
2646    int64_t default_frame_duration = NSEC_PER_33MSEC;
2647    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2648
2649    /* sensitivity */
2650    int32_t default_sensitivity = 100;
2651    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2652
2653    mDefaultMetadata[type] = settings.release();
2654
2655    pthread_mutex_unlock(&mMutex);
2656    return mDefaultMetadata[type];
2657}
2658
2659/*===========================================================================
2660 * FUNCTION   : setFrameParameters
2661 *
2662 * DESCRIPTION: set parameters per frame as requested in the metadata from
2663 *              framework
2664 *
2665 * PARAMETERS :
2666 *   @request   : request that needs to be serviced
2667 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2668 *
2669 * RETURN     : success: NO_ERROR
2670 *              failure:
2671 *==========================================================================*/
2672int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2673                    uint32_t streamTypeMask)
2674{
2675    /*translate from camera_metadata_t type to parm_type_t*/
2676    int rc = 0;
2677    if (request->settings == NULL && mFirstRequest) {
2678        /*settings cannot be null for the first request*/
2679        return BAD_VALUE;
2680    }
2681
2682    int32_t hal_version = CAM_HAL_V3;
2683
2684    memset(mParameters, 0, sizeof(parm_buffer_t));
2685    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2686    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2687                sizeof(hal_version), &hal_version);
2688    if (rc < 0) {
2689        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2690        return BAD_VALUE;
2691    }
2692
2693    /*we need to update the frame number in the parameters*/
2694    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2695                                sizeof(request->frame_number), &(request->frame_number));
2696    if (rc < 0) {
2697        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2698        return BAD_VALUE;
2699    }
2700
2701    /* Update stream id mask where buffers are requested */
2702    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2703                                sizeof(streamTypeMask), &streamTypeMask);
2704    if (rc < 0) {
2705        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2706        return BAD_VALUE;
2707    }
2708
2709    if(request->settings != NULL){
2710        rc = translateMetadataToParameters(request);
2711    }
2712    /*set the parameters to backend*/
2713    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2714    return rc;
2715}
2716
2717/*===========================================================================
2718 * FUNCTION   : translateMetadataToParameters
2719 *
2720 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2721 *
2722 *
2723 * PARAMETERS :
2724 *   @request  : request sent from framework
2725 *
2726 *
2727 * RETURN     : success: NO_ERROR
2728 *              failure:
2729 *==========================================================================*/
2730int QCamera3HardwareInterface::translateMetadataToParameters
2731                                  (const camera3_capture_request_t *request)
2732{
2733    int rc = 0;
2734    CameraMetadata frame_settings;
2735    frame_settings = request->settings;
2736
2737    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2738        int32_t antibandingMode =
2739            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2740        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2741                sizeof(antibandingMode), &antibandingMode);
2742    }
2743
2744    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2745        int32_t expCompensation = frame_settings.find(
2746            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2747        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2748            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2749        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2750            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2751        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2752          sizeof(expCompensation), &expCompensation);
2753    }
2754
2755    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2756        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2757        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2758                sizeof(aeLock), &aeLock);
2759    }
2760    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2761        cam_fps_range_t fps_range;
2762        fps_range.min_fps =
2763            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2764        fps_range.max_fps =
2765            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2766        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2767                sizeof(fps_range), &fps_range);
2768    }
2769
2770    float focalDistance = -1.0;
2771    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2772        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2773        rc = AddSetParmEntryToBatch(mParameters,
2774                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2775                sizeof(focalDistance), &focalDistance);
2776    }
2777
2778    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2779        uint8_t fwk_focusMode =
2780            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2781        uint8_t focusMode;
2782        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2783            focusMode = CAM_FOCUS_MODE_INFINITY;
2784        } else{
2785         focusMode = lookupHalName(FOCUS_MODES_MAP,
2786                                   sizeof(FOCUS_MODES_MAP),
2787                                   fwk_focusMode);
2788        }
2789        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2790                sizeof(focusMode), &focusMode);
2791    }
2792
2793    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2794        uint8_t awbLock =
2795            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2796        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2797                sizeof(awbLock), &awbLock);
2798    }
2799
2800    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2801        uint8_t fwk_whiteLevel =
2802            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2803        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2804                sizeof(WHITE_BALANCE_MODES_MAP),
2805                fwk_whiteLevel);
2806        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2807                sizeof(whiteLevel), &whiteLevel);
2808    }
2809
2810    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2811        uint8_t fwk_effectMode =
2812            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2813        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2814                sizeof(EFFECT_MODES_MAP),
2815                fwk_effectMode);
2816        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2817                sizeof(effectMode), &effectMode);
2818    }
2819
2820    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2821        uint8_t fwk_aeMode =
2822            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2823        uint8_t aeMode;
2824        int32_t redeye;
2825
2826        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2827            aeMode = CAM_AE_MODE_OFF;
2828        } else {
2829            aeMode = CAM_AE_MODE_ON;
2830        }
2831        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2832            redeye = 1;
2833        } else {
2834            redeye = 0;
2835        }
2836
2837        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2838                                          sizeof(AE_FLASH_MODE_MAP),
2839                                          fwk_aeMode);
2840        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2841                sizeof(aeMode), &aeMode);
2842        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2843                sizeof(flashMode), &flashMode);
2844        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2845                sizeof(redeye), &redeye);
2846    }
2847
2848    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2849        uint8_t colorCorrectMode =
2850            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2851        rc =
2852            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2853                    sizeof(colorCorrectMode), &colorCorrectMode);
2854    }
2855
2856    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
2857        cam_color_correct_gains_t colorCorrectGains;
2858        for (int i = 0; i < 4; i++) {
2859            colorCorrectGains.gains[i] =
2860                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
2861        }
2862        rc =
2863            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
2864                    sizeof(colorCorrectGains), &colorCorrectGains);
2865    }
2866
2867    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
2868        cam_color_correct_matrix_t colorCorrectTransform;
2869        cam_rational_type_t transform_elem;
2870        int num = 0;
2871        for (int i = 0; i < 3; i++) {
2872           for (int j = 0; j < 3; j++) {
2873              transform_elem.numerator =
2874                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
2875              transform_elem.denominator =
2876                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
2877              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
2878              num++;
2879           }
2880        }
2881        rc =
2882            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
2883                    sizeof(colorCorrectTransform), &colorCorrectTransform);
2884    }
2885
2886    cam_trigger_t aecTrigger;
2887    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2888    aecTrigger.trigger_id = -1;
2889    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2890        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2891        aecTrigger.trigger =
2892            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2893        aecTrigger.trigger_id =
2894            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2895    }
2896    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2897                                sizeof(aecTrigger), &aecTrigger);
2898
2899    /*af_trigger must come with a trigger id*/
2900    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2901        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2902        cam_trigger_t af_trigger;
2903        af_trigger.trigger =
2904            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2905        af_trigger.trigger_id =
2906            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2907        rc = AddSetParmEntryToBatch(mParameters,
2908                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2909    }
2910
2911    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2912        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2913        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2914                sizeof(metaMode), &metaMode);
2915        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2916           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2917           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2918                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2919                                             fwk_sceneMode);
2920           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2921                sizeof(sceneMode), &sceneMode);
2922        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
2923           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2924           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2925                sizeof(sceneMode), &sceneMode);
2926        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
2927           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2928           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2929                sizeof(sceneMode), &sceneMode);
2930        }
2931    }
2932
2933    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2934        int32_t demosaic =
2935            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2936        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2937                sizeof(demosaic), &demosaic);
2938    }
2939
2940    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2941        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2942        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
2943                sizeof(edgeMode), &edgeMode);
2944    }
2945
2946    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2947        int32_t edgeStrength =
2948            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2949        rc = AddSetParmEntryToBatch(mParameters,
2950                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2951    }
2952
2953    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2954        int32_t respectFlashMode = 1;
2955        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2956            uint8_t fwk_aeMode =
2957                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2958            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
2959                respectFlashMode = 0;
2960                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
2961                    __func__);
2962            }
2963        }
2964        if (respectFlashMode) {
2965            uint8_t flashMode =
2966                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2967            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
2968                                          sizeof(FLASH_MODES_MAP),
2969                                          flashMode);
2970            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
2971            // To check: CAM_INTF_META_FLASH_MODE usage
2972            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2973                          sizeof(flashMode), &flashMode);
2974        }
2975    }
2976
2977    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2978        uint8_t flashPower =
2979            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2980        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2981                sizeof(flashPower), &flashPower);
2982    }
2983
2984    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2985        int64_t flashFiringTime =
2986            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2987        rc = AddSetParmEntryToBatch(mParameters,
2988                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2989    }
2990
2991    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2992        uint8_t geometricMode =
2993            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2994        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2995                sizeof(geometricMode), &geometricMode);
2996    }
2997
2998    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2999        uint8_t geometricStrength =
3000            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3001        rc = AddSetParmEntryToBatch(mParameters,
3002                CAM_INTF_META_GEOMETRIC_STRENGTH,
3003                sizeof(geometricStrength), &geometricStrength);
3004    }
3005
3006    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3007        uint8_t hotPixelMode =
3008            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3009        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3010                sizeof(hotPixelMode), &hotPixelMode);
3011    }
3012
3013    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3014        float lensAperture =
3015            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3016        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3017                sizeof(lensAperture), &lensAperture);
3018    }
3019
3020    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3021        float filterDensity =
3022            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3023        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3024                sizeof(filterDensity), &filterDensity);
3025    }
3026
3027    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3028        float focalLength =
3029            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3030        rc = AddSetParmEntryToBatch(mParameters,
3031                CAM_INTF_META_LENS_FOCAL_LENGTH,
3032                sizeof(focalLength), &focalLength);
3033    }
3034
3035    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3036        uint8_t optStabMode =
3037            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3038        rc = AddSetParmEntryToBatch(mParameters,
3039                CAM_INTF_META_LENS_OPT_STAB_MODE,
3040                sizeof(optStabMode), &optStabMode);
3041    }
3042
3043    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3044        uint8_t noiseRedMode =
3045            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3046        rc = AddSetParmEntryToBatch(mParameters,
3047                CAM_INTF_META_NOISE_REDUCTION_MODE,
3048                sizeof(noiseRedMode), &noiseRedMode);
3049    }
3050
3051    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3052        uint8_t noiseRedStrength =
3053            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3054        rc = AddSetParmEntryToBatch(mParameters,
3055                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3056                sizeof(noiseRedStrength), &noiseRedStrength);
3057    }
3058
3059    cam_crop_region_t scalerCropRegion;
3060    bool scalerCropSet = false;
3061    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3062        scalerCropRegion.left =
3063            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3064        scalerCropRegion.top =
3065            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3066        scalerCropRegion.width =
3067            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3068        scalerCropRegion.height =
3069            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3070        rc = AddSetParmEntryToBatch(mParameters,
3071                CAM_INTF_META_SCALER_CROP_REGION,
3072                sizeof(scalerCropRegion), &scalerCropRegion);
3073        scalerCropSet = true;
3074    }
3075
3076    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3077        int64_t sensorExpTime =
3078            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3079        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3080        rc = AddSetParmEntryToBatch(mParameters,
3081                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3082                sizeof(sensorExpTime), &sensorExpTime);
3083    }
3084
3085    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3086        int64_t sensorFrameDuration =
3087            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3088        int64_t minFrameDuration = getMinFrameDuration(request);
3089        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3090        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3091            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3092        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3093        rc = AddSetParmEntryToBatch(mParameters,
3094                CAM_INTF_META_SENSOR_FRAME_DURATION,
3095                sizeof(sensorFrameDuration), &sensorFrameDuration);
3096    }
3097
3098    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3099        int32_t sensorSensitivity =
3100            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3101        if (sensorSensitivity <
3102                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3103            sensorSensitivity =
3104                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3105        if (sensorSensitivity >
3106                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3107            sensorSensitivity =
3108                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3109        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3110        rc = AddSetParmEntryToBatch(mParameters,
3111                CAM_INTF_META_SENSOR_SENSITIVITY,
3112                sizeof(sensorSensitivity), &sensorSensitivity);
3113    }
3114
3115    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3116        int32_t shadingMode =
3117            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3118        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3119                sizeof(shadingMode), &shadingMode);
3120    }
3121
3122    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3123        uint8_t shadingStrength =
3124            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3125        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3126                sizeof(shadingStrength), &shadingStrength);
3127    }
3128
3129    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3130        uint8_t fwk_facedetectMode =
3131            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3132        uint8_t facedetectMode =
3133            lookupHalName(FACEDETECT_MODES_MAP,
3134                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3135        rc = AddSetParmEntryToBatch(mParameters,
3136                CAM_INTF_META_STATS_FACEDETECT_MODE,
3137                sizeof(facedetectMode), &facedetectMode);
3138    }
3139
3140    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3141        uint8_t histogramMode =
3142            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3143        rc = AddSetParmEntryToBatch(mParameters,
3144                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3145                sizeof(histogramMode), &histogramMode);
3146    }
3147
3148    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3149        uint8_t sharpnessMapMode =
3150            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3151        rc = AddSetParmEntryToBatch(mParameters,
3152                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3153                sizeof(sharpnessMapMode), &sharpnessMapMode);
3154    }
3155
3156    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3157        uint8_t tonemapMode =
3158            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3159        rc = AddSetParmEntryToBatch(mParameters,
3160                CAM_INTF_META_TONEMAP_MODE,
3161                sizeof(tonemapMode), &tonemapMode);
3162    }
3163    int point = 0;
3164    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) {
3165        cam_tonemap_curve_t tonemapCurveBlue;
3166        tonemapCurveBlue.tonemap_points_cnt =
3167           gCamCapability[mCameraId]->max_tone_map_curve_points;
3168        for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) {
3169            for (int j = 0; j < 2; j++) {
3170               tonemapCurveBlue.tonemap_points[i][j] =
3171                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3172               point++;
3173            }
3174        }
3175        rc = AddSetParmEntryToBatch(mParameters,
3176                CAM_INTF_META_TONEMAP_CURVE_BLUE,
3177                sizeof(tonemapCurveBlue), &tonemapCurveBlue);
3178    }
3179    point = 0;
3180    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) {
3181        cam_tonemap_curve_t tonemapCurveGreen;
3182        tonemapCurveGreen.tonemap_points_cnt =
3183           gCamCapability[mCameraId]->max_tone_map_curve_points;
3184        for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) {
3185            for (int j = 0; j < 2; j++) {
3186               tonemapCurveGreen.tonemap_points[i][j] =
3187                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3188               point++;
3189            }
3190        }
3191        rc = AddSetParmEntryToBatch(mParameters,
3192                CAM_INTF_META_TONEMAP_CURVE_GREEN,
3193                sizeof(tonemapCurveGreen), &tonemapCurveGreen);
3194    }
3195    point = 0;
3196    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3197        cam_tonemap_curve_t tonemapCurveRed;
3198        tonemapCurveRed.tonemap_points_cnt =
3199           gCamCapability[mCameraId]->max_tone_map_curve_points;
3200        for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) {
3201            for (int j = 0; j < 2; j++) {
3202               tonemapCurveRed.tonemap_points[i][j] =
3203                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3204               point++;
3205            }
3206        }
3207        rc = AddSetParmEntryToBatch(mParameters,
3208                CAM_INTF_META_TONEMAP_CURVE_RED,
3209                sizeof(tonemapCurveRed), &tonemapCurveRed);
3210    }
3211
3212    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3213        uint8_t captureIntent =
3214            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3215        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3216                sizeof(captureIntent), &captureIntent);
3217    }
3218
3219    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3220        uint8_t blackLevelLock =
3221            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3222        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3223                sizeof(blackLevelLock), &blackLevelLock);
3224    }
3225
3226    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3227        uint8_t lensShadingMapMode =
3228            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3229        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3230                sizeof(lensShadingMapMode), &lensShadingMapMode);
3231    }
3232
3233    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3234        cam_area_t roi;
3235        bool reset = true;
3236        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3237        if (scalerCropSet) {
3238            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3239        }
3240        if (reset) {
3241            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3242                    sizeof(roi), &roi);
3243        }
3244    }
3245
3246    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3247        cam_area_t roi;
3248        bool reset = true;
3249        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3250        if (scalerCropSet) {
3251            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3252        }
3253        if (reset) {
3254            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3255                    sizeof(roi), &roi);
3256        }
3257    }
3258
3259    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3260        cam_area_t roi;
3261        bool reset = true;
3262        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3263        if (scalerCropSet) {
3264            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3265        }
3266        if (reset) {
3267            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3268                    sizeof(roi), &roi);
3269        }
3270    }
3271    return rc;
3272}
3273
3274/*===========================================================================
3275 * FUNCTION   : getJpegSettings
3276 *
3277 * DESCRIPTION: save the jpeg settings in the HAL
3278 *
3279 *
3280 * PARAMETERS :
3281 *   @settings  : frame settings information from framework
3282 *
3283 *
3284 * RETURN     : success: NO_ERROR
3285 *              failure:
3286 *==========================================================================*/
3287int QCamera3HardwareInterface::getJpegSettings
3288                                  (const camera_metadata_t *settings)
3289{
3290    if (mJpegSettings) {
3291        if (mJpegSettings->gps_timestamp) {
3292            free(mJpegSettings->gps_timestamp);
3293            mJpegSettings->gps_timestamp = NULL;
3294        }
3295        if (mJpegSettings->gps_coordinates) {
3296            for (int i = 0; i < 3; i++) {
3297                free(mJpegSettings->gps_coordinates[i]);
3298                mJpegSettings->gps_coordinates[i] = NULL;
3299            }
3300        }
3301        free(mJpegSettings);
3302        mJpegSettings = NULL;
3303    }
3304    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3305    CameraMetadata jpeg_settings;
3306    jpeg_settings = settings;
3307
3308    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3309        mJpegSettings->jpeg_orientation =
3310            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3311    } else {
3312        mJpegSettings->jpeg_orientation = 0;
3313    }
3314    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3315        mJpegSettings->jpeg_quality =
3316            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3317    } else {
3318        mJpegSettings->jpeg_quality = 85;
3319    }
3320    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3321        mJpegSettings->thumbnail_size.width =
3322            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3323        mJpegSettings->thumbnail_size.height =
3324            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3325    } else {
3326        mJpegSettings->thumbnail_size.width = 0;
3327        mJpegSettings->thumbnail_size.height = 0;
3328    }
3329    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3330        for (int i = 0; i < 3; i++) {
3331            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3332            *(mJpegSettings->gps_coordinates[i]) =
3333                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3334        }
3335    } else{
3336       for (int i = 0; i < 3; i++) {
3337            mJpegSettings->gps_coordinates[i] = NULL;
3338        }
3339    }
3340
3341    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3342        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3343        *(mJpegSettings->gps_timestamp) =
3344            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3345    } else {
3346        mJpegSettings->gps_timestamp = NULL;
3347    }
3348
3349    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3350        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3351        for (int i = 0; i < len; i++) {
3352            mJpegSettings->gps_processing_method[i] =
3353                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3354        }
3355        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3356            mJpegSettings->gps_processing_method[len] = '\0';
3357        }
3358    } else {
3359        mJpegSettings->gps_processing_method[0] = '\0';
3360    }
3361
3362    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3363        mJpegSettings->sensor_sensitivity =
3364            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3365    } else {
3366        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3367    }
3368
3369    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3370
3371    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3372        mJpegSettings->lens_focal_length =
3373            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3374    }
3375    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3376        mJpegSettings->exposure_compensation =
3377            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3378    }
3379    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3380    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3381    mJpegSettings->is_jpeg_format = true;
3382    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3383    return 0;
3384}
3385
3386/*===========================================================================
3387 * FUNCTION   : captureResultCb
3388 *
3389 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3390 *
3391 * PARAMETERS :
3392 *   @frame  : frame information from mm-camera-interface
3393 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3394 *   @userdata: userdata
3395 *
3396 * RETURN     : NONE
3397 *==========================================================================*/
3398void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3399                camera3_stream_buffer_t *buffer,
3400                uint32_t frame_number, void *userdata)
3401{
3402    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3403    if (hw == NULL) {
3404        ALOGE("%s: Invalid hw %p", __func__, hw);
3405        return;
3406    }
3407
3408    hw->captureResultCb(metadata, buffer, frame_number);
3409    return;
3410}
3411
3412
3413/*===========================================================================
3414 * FUNCTION   : initialize
3415 *
3416 * DESCRIPTION: Pass framework callback pointers to HAL
3417 *
3418 * PARAMETERS :
3419 *
3420 *
3421 * RETURN     : Success : 0
3422 *              Failure: -ENODEV
3423 *==========================================================================*/
3424
3425int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3426                                  const camera3_callback_ops_t *callback_ops)
3427{
3428    ALOGV("%s: E", __func__);
3429    QCamera3HardwareInterface *hw =
3430        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3431    if (!hw) {
3432        ALOGE("%s: NULL camera device", __func__);
3433        return -ENODEV;
3434    }
3435
3436    int rc = hw->initialize(callback_ops);
3437    ALOGV("%s: X", __func__);
3438    return rc;
3439}
3440
3441/*===========================================================================
3442 * FUNCTION   : configure_streams
3443 *
3444 * DESCRIPTION:
3445 *
3446 * PARAMETERS :
3447 *
3448 *
3449 * RETURN     : Success: 0
3450 *              Failure: -EINVAL (if stream configuration is invalid)
3451 *                       -ENODEV (fatal error)
3452 *==========================================================================*/
3453
3454int QCamera3HardwareInterface::configure_streams(
3455        const struct camera3_device *device,
3456        camera3_stream_configuration_t *stream_list)
3457{
3458    ALOGV("%s: E", __func__);
3459    QCamera3HardwareInterface *hw =
3460        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3461    if (!hw) {
3462        ALOGE("%s: NULL camera device", __func__);
3463        return -ENODEV;
3464    }
3465    int rc = hw->configureStreams(stream_list);
3466    ALOGV("%s: X", __func__);
3467    return rc;
3468}
3469
3470/*===========================================================================
3471 * FUNCTION   : register_stream_buffers
3472 *
3473 * DESCRIPTION: Register stream buffers with the device
3474 *
3475 * PARAMETERS :
3476 *
3477 * RETURN     :
3478 *==========================================================================*/
3479int QCamera3HardwareInterface::register_stream_buffers(
3480        const struct camera3_device *device,
3481        const camera3_stream_buffer_set_t *buffer_set)
3482{
3483    ALOGV("%s: E", __func__);
3484    QCamera3HardwareInterface *hw =
3485        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3486    if (!hw) {
3487        ALOGE("%s: NULL camera device", __func__);
3488        return -ENODEV;
3489    }
3490    int rc = hw->registerStreamBuffers(buffer_set);
3491    ALOGV("%s: X", __func__);
3492    return rc;
3493}
3494
3495/*===========================================================================
3496 * FUNCTION   : construct_default_request_settings
3497 *
3498 * DESCRIPTION: Configure a settings buffer to meet the required use case
3499 *
3500 * PARAMETERS :
3501 *
3502 *
3503 * RETURN     : Success: Return valid metadata
3504 *              Failure: Return NULL
3505 *==========================================================================*/
3506const camera_metadata_t* QCamera3HardwareInterface::
3507    construct_default_request_settings(const struct camera3_device *device,
3508                                        int type)
3509{
3510
3511    ALOGV("%s: E", __func__);
3512    camera_metadata_t* fwk_metadata = NULL;
3513    QCamera3HardwareInterface *hw =
3514        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3515    if (!hw) {
3516        ALOGE("%s: NULL camera device", __func__);
3517        return NULL;
3518    }
3519
3520    fwk_metadata = hw->translateCapabilityToMetadata(type);
3521
3522    ALOGV("%s: X", __func__);
3523    return fwk_metadata;
3524}
3525
3526/*===========================================================================
3527 * FUNCTION   : process_capture_request
3528 *
3529 * DESCRIPTION:
3530 *
3531 * PARAMETERS :
3532 *
3533 *
3534 * RETURN     :
3535 *==========================================================================*/
3536int QCamera3HardwareInterface::process_capture_request(
3537                    const struct camera3_device *device,
3538                    camera3_capture_request_t *request)
3539{
3540    ALOGV("%s: E", __func__);
3541    QCamera3HardwareInterface *hw =
3542        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3543    if (!hw) {
3544        ALOGE("%s: NULL camera device", __func__);
3545        return -EINVAL;
3546    }
3547
3548    int rc = hw->processCaptureRequest(request);
3549    ALOGV("%s: X", __func__);
3550    return rc;
3551}
3552
3553/*===========================================================================
3554 * FUNCTION   : get_metadata_vendor_tag_ops
3555 *
3556 * DESCRIPTION:
3557 *
3558 * PARAMETERS :
3559 *
3560 *
3561 * RETURN     :
3562 *==========================================================================*/
3563
3564void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3565                const struct camera3_device *device,
3566                vendor_tag_query_ops_t* ops)
3567{
3568    ALOGV("%s: E", __func__);
3569    QCamera3HardwareInterface *hw =
3570        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3571    if (!hw) {
3572        ALOGE("%s: NULL camera device", __func__);
3573        return;
3574    }
3575
3576    hw->getMetadataVendorTagOps(ops);
3577    ALOGV("%s: X", __func__);
3578    return;
3579}
3580
3581/*===========================================================================
3582 * FUNCTION   : dump
3583 *
3584 * DESCRIPTION:
3585 *
3586 * PARAMETERS :
3587 *
3588 *
3589 * RETURN     :
3590 *==========================================================================*/
3591
3592void QCamera3HardwareInterface::dump(
3593                const struct camera3_device *device, int fd)
3594{
3595    ALOGV("%s: E", __func__);
3596    QCamera3HardwareInterface *hw =
3597        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3598    if (!hw) {
3599        ALOGE("%s: NULL camera device", __func__);
3600        return;
3601    }
3602
3603    hw->dump(fd);
3604    ALOGV("%s: X", __func__);
3605    return;
3606}
3607
3608/*===========================================================================
3609 * FUNCTION   : close_camera_device
3610 *
3611 * DESCRIPTION:
3612 *
3613 * PARAMETERS :
3614 *
3615 *
3616 * RETURN     :
3617 *==========================================================================*/
3618int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3619{
3620    ALOGV("%s: E", __func__);
3621    int ret = NO_ERROR;
3622    QCamera3HardwareInterface *hw =
3623        reinterpret_cast<QCamera3HardwareInterface *>(
3624            reinterpret_cast<camera3_device_t *>(device)->priv);
3625    if (!hw) {
3626        ALOGE("NULL camera device");
3627        return BAD_VALUE;
3628    }
3629    delete hw;
3630
3631    pthread_mutex_lock(&mCameraSessionLock);
3632    mCameraSessionActive = 0;
3633    pthread_mutex_unlock(&mCameraSessionLock);
3634    ALOGV("%s: X", __func__);
3635    return ret;
3636}
3637
3638/*===========================================================================
3639 * FUNCTION   : getWaveletDenoiseProcessPlate
3640 *
3641 * DESCRIPTION: query wavelet denoise process plate
3642 *
3643 * PARAMETERS : None
3644 *
3645 * RETURN     : WNR prcocess plate vlaue
3646 *==========================================================================*/
3647cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3648{
3649    char prop[PROPERTY_VALUE_MAX];
3650    memset(prop, 0, sizeof(prop));
3651    property_get("persist.denoise.process.plates", prop, "0");
3652    int processPlate = atoi(prop);
3653    switch(processPlate) {
3654    case 0:
3655        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3656    case 1:
3657        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3658    case 2:
3659        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3660    case 3:
3661        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3662    default:
3663        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3664    }
3665}
3666
3667/*===========================================================================
3668 * FUNCTION   : needRotationReprocess
3669 *
3670 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3671 *
3672 * PARAMETERS : none
3673 *
3674 * RETURN     : true: needed
3675 *              false: no need
3676 *==========================================================================*/
3677bool QCamera3HardwareInterface::needRotationReprocess()
3678{
3679
3680    if (!mJpegSettings->is_jpeg_format) {
3681        // RAW image, no need to reprocess
3682        return false;
3683    }
3684
3685    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3686        mJpegSettings->jpeg_orientation > 0) {
3687        // current rotation is not zero, and pp has the capability to process rotation
3688        ALOGD("%s: need do reprocess for rotation", __func__);
3689        return true;
3690    }
3691
3692    return false;
3693}
3694
3695/*===========================================================================
3696 * FUNCTION   : needReprocess
3697 *
3698 * DESCRIPTION: if reprocess in needed
3699 *
3700 * PARAMETERS : none
3701 *
3702 * RETURN     : true: needed
3703 *              false: no need
3704 *==========================================================================*/
3705bool QCamera3HardwareInterface::needReprocess()
3706{
3707    if (!mJpegSettings->is_jpeg_format) {
3708        // RAW image, no need to reprocess
3709        return false;
3710    }
3711
3712    if ((mJpegSettings->min_required_pp_mask > 0) ||
3713         isWNREnabled()) {
3714        // TODO: add for ZSL HDR later
3715        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3716        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3717        return true;
3718    }
3719    return needRotationReprocess();
3720}
3721
3722/*===========================================================================
3723 * FUNCTION   : addOnlineReprocChannel
3724 *
3725 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
3726 *              coming from input channel
3727 *
3728 * PARAMETERS :
3729 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3730 *
3731 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3732 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
                                                      QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;
    if (pInputChannel == NULL) {
        ALOGE("%s: input channel obj is NULL", __func__);
        return NULL;
    }

    // Reuse the input channel's padding info; the stream-callback argument is
    // NULL because reprocessed output is routed through picChHandle instead.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
    if (NULL == pChannel) {
        ALOGE("%s: no mem for reprocess channel", __func__);
        return NULL;
    }

    // Capture channel, only need snapshot and postview streams start together
    // NOTE(review): attr is populated below but never handed to the channel —
    // initialize() takes no arguments here. Looks like dead code or a missing
    // parameter; confirm against QCamera3ReprocessChannel::initialize().
    mm_camera_channel_attr_t attr;
    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
    rc = pChannel->initialize();
    if (rc != NO_ERROR) {
        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: build up the post-processing feature mask from the
    // sensor capability flags and the current JPEG settings.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
        pp_config.sharpness = 10;
    }

    // 2D wavelet denoise, with the process plate chosen via system property
    // (see getWaveletDenoiseProcessPlate).
    if (isWNREnabled()) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
        pp_config.denoise2d.denoise_enable = 1;
        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
    }
    // Map the requested JPEG orientation (degrees) onto the PP rotation enum.
    // Values other than 0/90/180/270 leave pp_config.rotation at its
    // memset default.
    if (needRotationReprocess()) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
        int rotation = mJpegSettings->jpeg_orientation;
        if (rotation == 0) {
            pp_config.rotation = ROTATE_0;
        } else if (rotation == 90) {
            pp_config.rotation = ROTATE_90;
        } else if (rotation == 180) {
            pp_config.rotation = ROTATE_180;
        } else if (rotation == 270) {
            pp_config.rotation = ROTATE_270;
        }
    }

   // Mirror the source channel's streams onto the reprocess channel, using
   // the metadata channel to feed per-frame settings.
   rc = pChannel->addReprocStreamsFromSource(pp_config,
                                             pInputChannel,
                                             mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
3799
// Maximum number of unmatched super-buffer frames to keep queued, bounded by
// the minimum number of post-processing buffers the sensor capability reports.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
3804
// Whether wavelet noise reduction is available, as reported by the camera
// capability query for this camera id.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
3808
3809}; //end namespace qcamera
3810