QCamera3HWI.cpp revision 62442157951e325f2e7b9d81e669992e8ba5564a
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48
/* Larger of two values; used by the frame-duration calculations below. */
#define MAX(a, b) ((a) > (b) ? (a) : (b))

/* Shorthand: fetch the mapped pointer for buffer INDEX from a memory object. */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables, indexed by camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// File-scope settings buffer; presumably the previously applied parameters —
// not referenced in this part of the file.
parm_buffer_t *prevSettings;
// Cached static metadata handed to the framework, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: only one camera session may be open at a time
// (see openCamera/closeCamera).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
// Non-zero while any camera session is open.
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
59
// Framework (ANDROID_CONTROL_EFFECT_MODE_*) -> HAL (CAM_EFFECT_MODE_*) mapping.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
71
// Framework (ANDROID_CONTROL_AWB_MODE_*) -> HAL (CAM_WB_MODE_*) mapping.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
83
// Framework (ANDROID_CONTROL_SCENE_MODE_*) -> HAL (CAM_SCENE_MODE_*) mapping.
// Note STEADYPHOTO maps onto the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
101
// Framework (ANDROID_CONTROL_AF_MODE_*) -> HAL (CAM_FOCUS_MODE_*) mapping.
// AF_MODE_OFF is expressed as the HAL's FIXED focus mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
110
// Framework (ANDROID_CONTROL_AE_ANTIBANDING_MODE_*) -> HAL antibanding mapping.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
117
// AE mode -> HAL flash mode. Both OFF and plain ON disable the flash; the
// REDEYE variant is mapped to plain AUTO flash.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
125
// Framework (ANDROID_FLASH_MODE_*) -> HAL (CAM_FLASH_MODE_*) mapping.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
131
// Face-detect mode mapping; only OFF and FULL are supported (no SIMPLE entry).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
136
// Supported JPEG thumbnail sizes as a flat (width, height) pair list; the
// trailing 0,0 entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
139
// camera3 device-ops dispatch table exposed to the framework through
// mCameraDevice.ops (set in the constructor). Uses GNU labeled-initializer
// syntax; each entry points at the corresponding static wrapper.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};
149
150
151/*===========================================================================
152 * FUNCTION   : QCamera3HardwareInterface
153 *
154 * DESCRIPTION: constructor of QCamera3HardwareInterface
155 *
156 * PARAMETERS :
157 *   @cameraId  : camera ID
158 *
159 * RETURN     : none
160 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the framework-visible device struct; priv stores this instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // Assumes gCamCapability[cameraId] was populated before construction —
    // TODO confirm initialization order in camera_open path.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built on demand; start with none cached.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // The power module is optional: failure to load it is logged, not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
204
205/*===========================================================================
206 * FUNCTION   : ~QCamera3HardwareInterface
207 *
208 * DESCRIPTION: destructor of QCamera3HardwareInterface
209 *
210 * PARAMETERS : none
211 *
212 * RETURN     : none
213 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
           channel->stop();
    }
    // Second pass: with everything stopped, delete the channel objects and
    // free the stream_info_t records (allocated with malloc in
    // configureStreams, hence free()).
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
            delete channel;
        free (*it);
    }

    // mPictureChannel was deleted above via its stream's priv pointer.
    mPictureChannel = NULL;

    if (mJpegSettings != NULL) {
        free(mJpegSettings);
        mJpegSettings = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        // The metadata channel is owned directly (not in mStreamInfo).
        mMetadataChannel->stop();
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    // Release any cached default request templates.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}
259
260/*===========================================================================
261 * FUNCTION   : openCamera
262 *
263 * DESCRIPTION: open camera
264 *
265 * PARAMETERS :
266 *   @hw_device  : double ptr for camera device struct
267 *
268 * RETURN     : int32_t type of status
269 *              NO_ERROR  -- success
270 *              none-zero failure code
271 *==========================================================================*/
272int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
273{
274    int rc = 0;
275    pthread_mutex_lock(&mCameraSessionLock);
276    if (mCameraSessionActive) {
277        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
278        pthread_mutex_unlock(&mCameraSessionLock);
279        return INVALID_OPERATION;
280    }
281
282    if (mCameraOpened) {
283        *hw_device = NULL;
284        return PERMISSION_DENIED;
285    }
286
287    rc = openCamera();
288    if (rc == 0) {
289        *hw_device = &mCameraDevice.common;
290        mCameraSessionActive = 1;
291    } else
292        *hw_device = NULL;
293
294#ifdef HAS_MULTIMEDIA_HINTS
295    if (rc == 0) {
296        if (m_pPowerModule) {
297            if (m_pPowerModule->powerHint) {
298                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
299                        (void *)"state=1");
300            }
301        }
302    }
303#endif
304    pthread_mutex_unlock(&mCameraSessionLock);
305    return rc;
306}
307
308/*===========================================================================
309 * FUNCTION   : openCamera
310 *
311 * DESCRIPTION: open camera
312 *
313 * PARAMETERS : none
314 *
315 * RETURN     : int32_t type of status
316 *              NO_ERROR  -- success
317 *              none-zero failure code
318 *==========================================================================*/
319int QCamera3HardwareInterface::openCamera()
320{
321    if (mCameraHandle) {
322        ALOGE("Failure: Camera already opened");
323        return ALREADY_EXISTS;
324    }
325    mCameraHandle = camera_open(mCameraId);
326    if (!mCameraHandle) {
327        ALOGE("camera_open failed.");
328        return UNKNOWN_ERROR;
329    }
330
331    mCameraOpened = true;
332
333    return NO_ERROR;
334}
335
336/*===========================================================================
337 * FUNCTION   : closeCamera
338 *
339 * DESCRIPTION: close camera
340 *
341 * PARAMETERS : none
342 *
343 * RETURN     : int32_t type of status
344 *              NO_ERROR  -- success
345 *              none-zero failure code
346 *==========================================================================*/
347int QCamera3HardwareInterface::closeCamera()
348{
349    int rc = NO_ERROR;
350
351    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
352    mCameraHandle = NULL;
353    mCameraOpened = false;
354
355#ifdef HAS_MULTIMEDIA_HINTS
356    if (rc == NO_ERROR) {
357        if (m_pPowerModule) {
358            if (m_pPowerModule->powerHint) {
359                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
360                        (void *)"state=0");
361            }
362        }
363    }
364#endif
365
366    return rc;
367}
368
369/*===========================================================================
370 * FUNCTION   : initialize
371 *
372 * DESCRIPTION: Initialize frameworks callback functions
373 *
374 * PARAMETERS :
375 *   @callback_ops : callback function to frameworks
376 *
377 * RETURN     :
378 *
379 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    // Step 1: allocate/initialize the parameter buffer shared with the backend.
    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
       goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    // NOTE(review): operator new throws by default, so this NULL check is
    // likely dead code — confirm whether the build uses -fno-exceptions.
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    return 0;

// Error unwind: each label undoes the steps completed before its failure.
err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
422
423/*===========================================================================
424 * FUNCTION   : configureStreams
425 *
426 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
427 *              and output streams.
428 *
429 * PARAMETERS :
430 *   @stream_list : streams to be configured
431 *
432 * RETURN     :
433 *
434 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    pthread_mutex_lock(&mMutex);
    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    /* first invalidate all the streams in the mStreamList
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }

    // Classify each requested stream: existing ones are marked RECONFIGURE
    // (their channel is torn down for rebuild), new ones are added as VALID.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            stream_info_t* stream_info;
            // NOTE(review): malloc return value is not checked — TODO.
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            // At most one input-capable stream is supported.
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    //mMetadataChannel->stop();

    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // Advertise gralloc usage flags to the framework by stream type.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    // A bidirectional YUV stream together with a JPEG stream
                    // enables ZSL: the channel is sized to the JPEG dimensions.
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework won't*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        // Reset the per-stream pending-buffer counter for this configuration.
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
    mPendingRequestsList.clear();

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mFirstRequest = true;

    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}
645
646/*===========================================================================
647 * FUNCTION   : validateCaptureRequest
648 *
649 * DESCRIPTION: validate a capture request from camera service
650 *
651 * PARAMETERS :
652 *   @request : request from framework to process
653 *
654 * RETURN     :
655 *
656 *==========================================================================*/
657int QCamera3HardwareInterface::validateCaptureRequest(
658                    camera3_capture_request_t *request)
659{
660    ssize_t idx = 0;
661    const camera3_stream_buffer_t *b;
662    CameraMetadata meta;
663
664    /* Sanity check the request */
665    if (request == NULL) {
666        ALOGE("%s: NULL capture request", __func__);
667        return BAD_VALUE;
668    }
669
670    uint32_t frameNumber = request->frame_number;
671    if (request->input_buffer != NULL &&
672            request->input_buffer->stream != mInputStream) {
673        ALOGE("%s: Request %d: Input buffer not from input stream!",
674                __FUNCTION__, frameNumber);
675        return BAD_VALUE;
676    }
677    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
678        ALOGE("%s: Request %d: No output buffers provided!",
679                __FUNCTION__, frameNumber);
680        return BAD_VALUE;
681    }
682    if (request->input_buffer != NULL) {
683        b = request->input_buffer;
684        QCamera3Channel *channel =
685            static_cast<QCamera3Channel*>(b->stream->priv);
686        if (channel == NULL) {
687            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
688                    __func__, frameNumber, idx);
689            return BAD_VALUE;
690        }
691        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
692            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
693                    __func__, frameNumber, idx);
694            return BAD_VALUE;
695        }
696        if (b->release_fence != -1) {
697            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
698                    __func__, frameNumber, idx);
699            return BAD_VALUE;
700        }
701        if (b->buffer == NULL) {
702            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
703                    __func__, frameNumber, idx);
704            return BAD_VALUE;
705        }
706    }
707
708    // Validate all buffers
709    b = request->output_buffers;
710    do {
711        QCamera3Channel *channel =
712                static_cast<QCamera3Channel*>(b->stream->priv);
713        if (channel == NULL) {
714            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
715                    __func__, frameNumber, idx);
716            return BAD_VALUE;
717        }
718        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
719            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
720                    __func__, frameNumber, idx);
721            return BAD_VALUE;
722        }
723        if (b->release_fence != -1) {
724            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
725                    __func__, frameNumber, idx);
726            return BAD_VALUE;
727        }
728        if (b->buffer == NULL) {
729            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
730                    __func__, frameNumber, idx);
731            return BAD_VALUE;
732        }
733        idx++;
734        b = request->output_buffers + idx;
735    } while (idx < (ssize_t)request->num_output_buffers);
736
737    return NO_ERROR;
738}
739
740/*===========================================================================
741 * FUNCTION   : deriveMinFrameDuration
742 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
744 *              on currently configured streams.
745 *
746 * PARAMETERS : NONE
747 *
748 * RETURN     : NONE
749 *
750 *==========================================================================*/
void QCamera3HardwareInterface::deriveMinFrameDuration()
{
    // "Dimension" below means pixel area (width * height).
    int32_t maxJpegDimension, maxProcessedDimension;

    maxJpegDimension = 0;
    maxProcessedDimension = 0;

    // Figure out maximum jpeg, processed, and raw dimensions
    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {

        // Input stream doesn't have valid stream_type
        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
            continue;

        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
            if (dimension > maxJpegDimension)
                maxJpegDimension = dimension;
        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
            // Everything that is neither BLOB nor RAW counts as "processed".
            if (dimension > maxProcessedDimension)
                maxProcessedDimension = dimension;
        }
    }

    //Assume all jpeg dimensions are in processed dimensions.
    if (maxJpegDimension > maxProcessedDimension)
        maxProcessedDimension = maxJpegDimension;

    //Find minimum durations for processed, jpeg, and raw
    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
    // Look up the capability entry whose picture size matches the largest
    // processed area; no match leaves the durations at their previous values.
    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
        if (maxProcessedDimension ==
            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
            // NOTE(review): both processed and jpeg minimum durations are taken
            // from jpeg_min_duration — confirm there is no separate
            // processed-duration table that should feed the first assignment.
            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
            break;
        }
    }
}
792
793/*===========================================================================
794 * FUNCTION   : getMinFrameDuration
795 *
 * DESCRIPTION: get minimum frame duration based on the currently derived minimum frame durations
797 *              and current request configuration.
798 *
 * PARAMETERS : @request: request sent by the frameworks
800 *
 * RETURN     : min frame duration for a particular request
802 *
803 *==========================================================================*/
804int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
805{
806    bool hasJpegStream = false;
807    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
808        const camera3_stream_t *stream = request->output_buffers[i].stream;
809        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
810            hasJpegStream = true;
811    }
812
813    if (!hasJpegStream)
814        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
815    else
816        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
817}
818
819/*===========================================================================
820 * FUNCTION   : registerStreamBuffers
821 *
822 * DESCRIPTION: Register buffers for a given stream with the HAL device.
823 *
824 * PARAMETERS :
825 *   @stream_list : streams to be configured
826 *
827 * RETURN     :
828 *
829 *==========================================================================*/
830int QCamera3HardwareInterface::registerStreamBuffers(
831        const camera3_stream_buffer_set_t *buffer_set)
832{
833    int rc = 0;
834
835    pthread_mutex_lock(&mMutex);
836
837    if (buffer_set == NULL) {
838        ALOGE("%s: Invalid buffer_set parameter.", __func__);
839        pthread_mutex_unlock(&mMutex);
840        return -EINVAL;
841    }
842    if (buffer_set->stream == NULL) {
843        ALOGE("%s: Invalid stream parameter.", __func__);
844        pthread_mutex_unlock(&mMutex);
845        return -EINVAL;
846    }
847    if (buffer_set->num_buffers < 1) {
848        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
849        pthread_mutex_unlock(&mMutex);
850        return -EINVAL;
851    }
852    if (buffer_set->buffers == NULL) {
853        ALOGE("%s: Invalid buffers parameter.", __func__);
854        pthread_mutex_unlock(&mMutex);
855        return -EINVAL;
856    }
857
858    camera3_stream_t *stream = buffer_set->stream;
859    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
860
861    //set the buffer_set in the mStreamInfo array
862    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
863            it != mStreamInfo.end(); it++) {
864        if ((*it)->stream == stream) {
865            uint32_t numBuffers = buffer_set->num_buffers;
866            (*it)->buffer_set.stream = buffer_set->stream;
867            (*it)->buffer_set.num_buffers = numBuffers;
868            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
869            if ((*it)->buffer_set.buffers == NULL) {
870                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
871                pthread_mutex_unlock(&mMutex);
872                return -ENOMEM;
873            }
874            for (size_t j = 0; j < numBuffers; j++){
875                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
876            }
877            (*it)->registered = 1;
878        }
879    }
880    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
881    if (rc < 0) {
882        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
883        pthread_mutex_unlock(&mMutex);
884        return -ENODEV;
885    }
886
887    pthread_mutex_unlock(&mMutex);
888    return NO_ERROR;
889}
890
891/*===========================================================================
892 * FUNCTION   : processCaptureRequest
893 *
894 * DESCRIPTION: process a capture request from camera service
895 *
896 * PARAMETERS :
897 *   @request : request from framework to process
898 *
899 * RETURN     :
900 *
901 *==========================================================================*/
902int QCamera3HardwareInterface::processCaptureRequest(
903                    camera3_capture_request_t *request)
904{
905    int rc = NO_ERROR;
906    int32_t request_id;
907    CameraMetadata meta;
908
909    pthread_mutex_lock(&mMutex);
910
911    rc = validateCaptureRequest(request);
912    if (rc != NO_ERROR) {
913        ALOGE("%s: incoming request is not valid", __func__);
914        pthread_mutex_unlock(&mMutex);
915        return rc;
916    }
917
918    uint32_t frameNumber = request->frame_number;
919    uint32_t streamTypeMask = 0;
920
921    meta = request->settings;
922    if (meta.exists(ANDROID_REQUEST_ID)) {
923        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
924        mCurrentRequestId = request_id;
925        ALOGV("%s: Received request with id: %d",__func__, request_id);
926    } else if (mFirstRequest || mCurrentRequestId == -1){
927        ALOGE("%s: Unable to find request id field, \
928                & no previous id available", __func__);
929        return NAME_NOT_FOUND;
930    } else {
931        ALOGV("%s: Re-using old request id", __func__);
932        request_id = mCurrentRequestId;
933    }
934
935    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
936                                    __func__, __LINE__,
937                                    request->num_output_buffers,
938                                    request->input_buffer,
939                                    frameNumber);
940    // Acquire all request buffers first
941    int blob_request = 0;
942    for (size_t i = 0; i < request->num_output_buffers; i++) {
943        const camera3_stream_buffer_t& output = request->output_buffers[i];
944        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
945        sp<Fence> acquireFence = new Fence(output.acquire_fence);
946
947        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
948        //Call function to store local copy of jpeg data for encode params.
949            blob_request = 1;
950            rc = getJpegSettings(request->settings);
951            if (rc < 0) {
952                ALOGE("%s: failed to get jpeg parameters", __func__);
953                pthread_mutex_unlock(&mMutex);
954                return rc;
955            }
956        }
957
958        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
959        if (rc != OK) {
960            ALOGE("%s: fence wait failed %d", __func__, rc);
961            pthread_mutex_unlock(&mMutex);
962            return rc;
963        }
964        streamTypeMask |= channel->getStreamTypeMask();
965    }
966
967    rc = setFrameParameters(request, streamTypeMask);
968    if (rc < 0) {
969        ALOGE("%s: fail to set frame parameters", __func__);
970        pthread_mutex_unlock(&mMutex);
971        return rc;
972    }
973
974    /* Update pending request list and pending buffers map */
975    PendingRequestInfo pendingRequest;
976    pendingRequest.frame_number = frameNumber;
977    pendingRequest.num_buffers = request->num_output_buffers;
978    pendingRequest.request_id = request_id;
979    pendingRequest.blob_request = blob_request;
980
981    for (size_t i = 0; i < request->num_output_buffers; i++) {
982        RequestedBufferInfo requestedBuf;
983        requestedBuf.stream = request->output_buffers[i].stream;
984        requestedBuf.buffer = NULL;
985        pendingRequest.buffers.push_back(requestedBuf);
986
987        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
988    }
989    mPendingRequestsList.push_back(pendingRequest);
990
991    // Notify metadata channel we receive a request
992    mMetadataChannel->request(NULL, frameNumber);
993
994    // Call request on other streams
995    for (size_t i = 0; i < request->num_output_buffers; i++) {
996        const camera3_stream_buffer_t& output = request->output_buffers[i];
997        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
998        mm_camera_buf_def_t *pInputBuffer = NULL;
999
1000        if (channel == NULL) {
1001            ALOGE("%s: invalid channel pointer for stream", __func__);
1002            continue;
1003        }
1004
1005        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1006            QCamera3RegularChannel* inputChannel = NULL;
1007            if(request->input_buffer != NULL){
1008
1009                //Try to get the internal format
1010                inputChannel = (QCamera3RegularChannel*)
1011                    request->input_buffer->stream->priv;
1012                if(inputChannel == NULL ){
1013                    ALOGE("%s: failed to get input channel handle", __func__);
1014                } else {
1015                    pInputBuffer =
1016                        inputChannel->getInternalFormatBuffer(
1017                                request->input_buffer->buffer);
1018                    ALOGD("%s: Input buffer dump",__func__);
1019                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1020                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1021                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1022                }
1023            }
1024            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1025                            pInputBuffer,(QCamera3Channel*)inputChannel);
1026        } else {
1027            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1028                __LINE__, output.buffer, frameNumber);
1029            rc = channel->request(output.buffer, frameNumber);
1030        }
1031        if (rc < 0)
1032            ALOGE("%s: request failed", __func__);
1033    }
1034
1035    mFirstRequest = false;
1036
1037    //Block on conditional variable
1038    mPendingRequest = 1;
1039    while (mPendingRequest == 1) {
1040        pthread_cond_wait(&mRequestCond, &mMutex);
1041    }
1042
1043    pthread_mutex_unlock(&mMutex);
1044    return rc;
1045}
1046
1047/*===========================================================================
1048 * FUNCTION   : getMetadataVendorTagOps
1049 *
1050 * DESCRIPTION:
1051 *
1052 * PARAMETERS :
1053 *
1054 *
1055 * RETURN     :
1056 *==========================================================================*/
1057void QCamera3HardwareInterface::getMetadataVendorTagOps(
1058                    vendor_tag_query_ops_t* /*ops*/)
1059{
1060    /* Enable locks when we eventually add Vendor Tags */
1061    /*
1062    pthread_mutex_lock(&mMutex);
1063
1064    pthread_mutex_unlock(&mMutex);
1065    */
1066    return;
1067}
1068
1069/*===========================================================================
1070 * FUNCTION   : dump
1071 *
1072 * DESCRIPTION:
1073 *
1074 * PARAMETERS :
1075 *
1076 *
1077 * RETURN     :
1078 *==========================================================================*/
1079void QCamera3HardwareInterface::dump(int /*fd*/)
1080{
1081    /*Enable lock when we implement this function*/
1082    /*
1083    pthread_mutex_lock(&mMutex);
1084
1085    pthread_mutex_unlock(&mMutex);
1086    */
1087    return;
1088}
1089
1090
1091/*===========================================================================
1092 * FUNCTION   : captureResultCb
1093 *
1094 * DESCRIPTION: Callback handler for all capture result
1095 *              (streams, as well as metadata)
1096 *
1097 * PARAMETERS :
1098 *   @metadata : metadata information
1099 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1100 *               NULL if metadata.
1101 *
1102 * RETURN     : NONE
1103 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // Two entry modes: metadata_buf != NULL means a metadata callback (buffer
    // is NULL); otherwise this is a stream-buffer callback for frame_number.
    pthread_mutex_lock(&mMutex);

    if (metadata_buf) {
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
            CAM_INTF_META_PENDING_REQUESTS, metadata);
        // NOTE: this local deliberately shadows the frame_number parameter;
        // in the metadata path the frame number comes from the metadata.
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        // Sensor timestamp arrives as a timeval; convert to nanoseconds.
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;

        // Metadata without a valid frame number only marks start-of-frame;
        // return the buffer and just re-evaluate the flow-control state.
        if (!frame_number_valid) {
            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
            mMetadataChannel->bufDone(metadata_buf);
            goto done_metadata;
        }
        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
                frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is metadata drop, how do we derive the timestamp of
            //dropped frames? For now, we fake the dropped timestamp by substracting
            //from the reported timestamp
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                // Dropped frame: synthesize a minimal result (timestamp + id).
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time, i->request_id);
                if (i->blob_request && needReprocess()) {
                   //If it is a blob request then send the metadata to the picture channel
                   // (ownership of metadata_buf transfers to the picture channel)
                   mPictureChannel->queueMetadata(metadata_buf);

                } else {
                   // Return metadata buffer
                   // NOTE(review): free() releases the super_buf wrapper after
                   // bufDone returns the underlying buffer -- assumes the
                   // wrapper was heap-allocated by the caller; confirm.
                   mMetadataChannel->bufDone(metadata_buf);
                   free(metadata_buf);
                }
            }
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            // Count the buffers that arrived before this metadata and were
            // cached on the pending entry (see the buffer path below).
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                if (!result_buffers) {
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        // Move the cached buffer into the result and drop the
                        // per-stream outstanding-buffer count.
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                // Metadata-only result; buffers will follow via the buffer path.
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }


done_metadata:
        // Flow control: wake processCaptureRequest only if no stream is at
        // its max_buffers limit and the backend reports no pending requests.
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued && !pending_requests) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
            i++;
        }
        if (i == mPendingRequestsList.end()) {
            // Metadata already retired this frame: forward the buffer now.
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            ALOGV("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            // Metadata not yet received: cache a heap copy of the buffer on
            // the pending entry; it is released in the metadata path above.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                                __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
    pthread_mutex_unlock(&mMutex);
    return;
}
1286
1287/*===========================================================================
1288 * FUNCTION   : translateCbMetadataToResultMetadata
1289 *
1290 * DESCRIPTION:
1291 *
1292 * PARAMETERS :
1293 *   @metadata : metadata information from callback
1294 *
1295 * RETURN     : camera_metadata_t*
1296 *              metadata in a format specified by fwk
1297 *==========================================================================*/
1298camera_metadata_t*
1299QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1300                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1301                                 int32_t request_id)
1302{
1303    CameraMetadata camMetadata;
1304    camera_metadata_t* resultMetadata;
1305
1306    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1307    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1308
1309    /*CAM_INTF_META_HISTOGRAM - TODO*/
1310    /*cam_hist_stats_t  *histogram =
1311      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1312      metadata);*/
1313
1314    /*face detection*/
1315    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1316        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1317    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1318    int32_t faceIds[numFaces];
1319    uint8_t faceScores[numFaces];
1320    int32_t faceRectangles[numFaces * 4];
1321    int32_t faceLandmarks[numFaces * 6];
1322    int j = 0, k = 0;
1323    for (int i = 0; i < numFaces; i++) {
1324        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1325        faceScores[i] = faceDetectionInfo->faces[i].score;
1326        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1327                faceRectangles+j, -1);
1328        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1329        j+= 4;
1330        k+= 6;
1331    }
1332    if (numFaces > 0) {
1333        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1334        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1335        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1336            faceRectangles, numFaces*4);
1337        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1338            faceLandmarks, numFaces*6);
1339    }
1340
1341    uint8_t  *color_correct_mode =
1342        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1343    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1344
1345    int32_t  *ae_precapture_id =
1346        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1347    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1348
1349    /*aec regions*/
1350    cam_area_t  *hAeRegions =
1351        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1352    int32_t aeRegions[5];
1353    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1354    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1355    if(mIsZslMode) {
1356        uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
1357        camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
1358    } else {
1359        uint8_t *ae_state =
1360            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1361        camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1362    }
1363    uint8_t  *focusMode =
1364        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1365    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1366
1367    /*af regions*/
1368    cam_area_t  *hAfRegions =
1369        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1370    int32_t afRegions[5];
1371    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1372    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1373
1374    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1375    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1376
1377    int32_t  *afTriggerId =
1378        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1379    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1380
1381    uint8_t  *whiteBalance =
1382        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1383    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1384
1385    /*awb regions*/
1386    cam_area_t  *hAwbRegions =
1387        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1388    int32_t awbRegions[5];
1389    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1390    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1391
1392    uint8_t  *whiteBalanceState =
1393        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1394    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1395
1396    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1397    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1398
1399    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1400    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1401
1402    uint8_t  *flashPower =
1403        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1404    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1405
1406    int64_t  *flashFiringTime =
1407        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1408    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1409
1410    /*int32_t  *ledMode =
1411      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1412      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1413
1414    uint8_t  *flashState =
1415        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1416    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1417
1418    uint8_t  *hotPixelMode =
1419        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1420    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1421
1422    float  *lensAperture =
1423        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1424    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1425
1426    float  *filterDensity =
1427        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1428    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1429
1430    float  *focalLength =
1431        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1432    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1433
1434    float  *focusDistance =
1435        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1436    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1437
1438    float  *focusRange =
1439        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1440    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1441
1442    uint8_t  *opticalStab =
1443        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1444    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1445
1446    /*int32_t  *focusState =
1447      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1448      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1449
1450    uint8_t  *noiseRedMode =
1451        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1452    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1453
1454    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1455
1456    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1457        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1458    int32_t scalerCropRegion[4];
1459    scalerCropRegion[0] = hScalerCropRegion->left;
1460    scalerCropRegion[1] = hScalerCropRegion->top;
1461    scalerCropRegion[2] = hScalerCropRegion->width;
1462    scalerCropRegion[3] = hScalerCropRegion->height;
1463    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1464
1465    int64_t  *sensorExpTime =
1466        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1467    mMetadataResponse.exposure_time = *sensorExpTime;
1468    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1469    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1470
1471    int64_t  *sensorFameDuration =
1472        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1473    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1474    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1475
1476    int32_t  *sensorSensitivity =
1477        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1478    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1479    mMetadataResponse.iso_speed = *sensorSensitivity;
1480    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1481
1482    uint8_t  *shadingMode =
1483        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1484    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1485
1486    uint8_t  *faceDetectMode =
1487        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1488    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1489        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1490        *faceDetectMode);
1491    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1492
1493    uint8_t  *histogramMode =
1494        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1495    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1496
1497    uint8_t  *sharpnessMapMode =
1498        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1499    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1500            sharpnessMapMode, 1);
1501
1502    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1503    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1504        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1505    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1506            (int32_t*)sharpnessMap->sharpness,
1507            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1508
1509    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1510        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1511    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1512    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1513    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1514                       (float*)lensShadingMap->lens_shading,
1515                       4*map_width*map_height);
1516
1517    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1518        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1519    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1520
1521    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1522        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1523    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1524                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1525
1526    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1527        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1528    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1529                       predColorCorrectionGains->gains, 4);
1530
1531    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1532        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1533    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1534                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1535
1536    uint8_t *blackLevelLock = (uint8_t*)
1537        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1538    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1539
1540    uint8_t *sceneFlicker = (uint8_t*)
1541        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1542    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1543
1544
1545    resultMetadata = camMetadata.release();
1546    return resultMetadata;
1547}
1548
1549/*===========================================================================
1550 * FUNCTION   : convertToRegions
1551 *
1552 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1553 *
1554 * PARAMETERS :
1555 *   @rect   : cam_rect_t struct to convert
1556 *   @region : int32_t destination array
1557 *   @weight : if we are converting from cam_area_t, weight is valid
1558 *             else weight = -1
1559 *
1560 *==========================================================================*/
1561void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1562    region[0] = rect.left;
1563    region[1] = rect.top;
1564    region[2] = rect.left + rect.width;
1565    region[3] = rect.top + rect.height;
1566    if (weight > -1) {
1567        region[4] = weight;
1568    }
1569}
1570
1571/*===========================================================================
1572 * FUNCTION   : convertFromRegions
1573 *
1574 * DESCRIPTION: helper method to convert from array to cam_rect_t
1575 *
1576 * PARAMETERS :
1577 *   @rect   : cam_rect_t struct to convert
1578 *   @region : int32_t destination array
1579 *   @weight : if we are converting from cam_area_t, weight is valid
1580 *             else weight = -1
1581 *
1582 *==========================================================================*/
1583void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1584                                                   const camera_metadata_t *settings,
1585                                                   uint32_t tag){
1586    CameraMetadata frame_settings;
1587    frame_settings = settings;
1588    int32_t x_min = frame_settings.find(tag).data.i32[0];
1589    int32_t y_min = frame_settings.find(tag).data.i32[1];
1590    int32_t x_max = frame_settings.find(tag).data.i32[2];
1591    int32_t y_max = frame_settings.find(tag).data.i32[3];
1592    roi->weight = frame_settings.find(tag).data.i32[4];
1593    roi->rect.left = x_min;
1594    roi->rect.top = y_min;
1595    roi->rect.width = x_max - x_min;
1596    roi->rect.height = y_max - y_min;
1597}
1598
1599/*===========================================================================
1600 * FUNCTION   : resetIfNeededROI
1601 *
1602 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1603 *              crop region
1604 *
1605 * PARAMETERS :
1606 *   @roi       : cam_area_t struct to resize
1607 *   @scalerCropRegion : cam_crop_region_t region to compare against
1608 *
1609 *
1610 *==========================================================================*/
1611bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1612                                                 const cam_crop_region_t* scalerCropRegion)
1613{
1614    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1615    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1616    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1617    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1618    if ((roi_x_max < scalerCropRegion->left) ||
1619        (roi_y_max < scalerCropRegion->top)  ||
1620        (roi->rect.left > crop_x_max) ||
1621        (roi->rect.top > crop_y_max)){
1622        return false;
1623    }
1624    if (roi->rect.left < scalerCropRegion->left) {
1625        roi->rect.left = scalerCropRegion->left;
1626    }
1627    if (roi->rect.top < scalerCropRegion->top) {
1628        roi->rect.top = scalerCropRegion->top;
1629    }
1630    if (roi_x_max > crop_x_max) {
1631        roi_x_max = crop_x_max;
1632    }
1633    if (roi_y_max > crop_y_max) {
1634        roi_y_max = crop_y_max;
1635    }
1636    roi->rect.width = roi_x_max - roi->rect.left;
1637    roi->rect.height = roi_y_max - roi->rect.top;
1638    return true;
1639}
1640
1641/*===========================================================================
1642 * FUNCTION   : convertLandmarks
1643 *
1644 * DESCRIPTION: helper method to extract the landmarks from face detection info
1645 *
1646 * PARAMETERS :
1647 *   @face   : cam_rect_t struct to convert
1648 *   @landmarks : int32_t destination array
1649 *
1650 *
1651 *==========================================================================*/
1652void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1653{
1654    landmarks[0] = face.left_eye_center.x;
1655    landmarks[1] = face.left_eye_center.y;
1656    landmarks[2] = face.right_eye_center.y;
1657    landmarks[3] = face.right_eye_center.y;
1658    landmarks[4] = face.mouth_center.x;
1659    landmarks[5] = face.mouth_center.y;
1660}
1661
1662#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1663/*===========================================================================
1664 * FUNCTION   : initCapabilities
1665 *
1666 * DESCRIPTION: initialize camera capabilities in static data struct
1667 *
1668 * PARAMETERS :
1669 *   @cameraId  : camera Id
1670 *
1671 * RETURN     : int32_t type of status
1672 *              NO_ERROR  -- success
1673 *              none-zero failure code
1674 *==========================================================================*/
1675int QCamera3HardwareInterface::initCapabilities(int cameraId)
1676{
1677    int rc = 0;
1678    mm_camera_vtbl_t *cameraHandle = NULL;
1679    QCamera3HeapMemory *capabilityHeap = NULL;
1680
1681    cameraHandle = camera_open(cameraId);
1682    if (!cameraHandle) {
1683        ALOGE("%s: camera_open failed", __func__);
1684        rc = -1;
1685        goto open_failed;
1686    }
1687
1688    capabilityHeap = new QCamera3HeapMemory();
1689    if (capabilityHeap == NULL) {
1690        ALOGE("%s: creation of capabilityHeap failed", __func__);
1691        goto heap_creation_failed;
1692    }
1693    /* Allocate memory for capability buffer */
1694    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1695    if(rc != OK) {
1696        ALOGE("%s: No memory for cappability", __func__);
1697        goto allocate_failed;
1698    }
1699
1700    /* Map memory for capability buffer */
1701    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1702    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1703                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1704                                capabilityHeap->getFd(0),
1705                                sizeof(cam_capability_t));
1706    if(rc < 0) {
1707        ALOGE("%s: failed to map capability buffer", __func__);
1708        goto map_failed;
1709    }
1710
1711    /* Query Capability */
1712    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1713    if(rc < 0) {
1714        ALOGE("%s: failed to query capability",__func__);
1715        goto query_failed;
1716    }
1717    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1718    if (!gCamCapability[cameraId]) {
1719        ALOGE("%s: out of memory", __func__);
1720        goto query_failed;
1721    }
1722    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1723                                        sizeof(cam_capability_t));
1724    rc = 0;
1725
1726query_failed:
1727    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1728                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1729map_failed:
1730    capabilityHeap->deallocate();
1731allocate_failed:
1732    delete capabilityHeap;
1733heap_creation_failed:
1734    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1735    cameraHandle = NULL;
1736open_failed:
1737    return rc;
1738}
1739
1740/*===========================================================================
1741 * FUNCTION   : initParameters
1742 *
1743 * DESCRIPTION: initialize camera parameters
1744 *
1745 * PARAMETERS :
1746 *
1747 * RETURN     : int32_t type of status
1748 *              NO_ERROR  -- success
1749 *              none-zero failure code
1750 *==========================================================================*/
1751int QCamera3HardwareInterface::initParameters()
1752{
1753    int rc = 0;
1754
1755    //Allocate Set Param Buffer
1756    mParamHeap = new QCamera3HeapMemory();
1757    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1758    if(rc != OK) {
1759        rc = NO_MEMORY;
1760        ALOGE("Failed to allocate SETPARM Heap memory");
1761        delete mParamHeap;
1762        mParamHeap = NULL;
1763        return rc;
1764    }
1765
1766    //Map memory for parameters buffer
1767    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1768            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1769            mParamHeap->getFd(0),
1770            sizeof(parm_buffer_t));
1771    if(rc < 0) {
1772        ALOGE("%s:failed to map SETPARM buffer",__func__);
1773        rc = FAILED_TRANSACTION;
1774        mParamHeap->deallocate();
1775        delete mParamHeap;
1776        mParamHeap = NULL;
1777        return rc;
1778    }
1779
1780    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1781    return rc;
1782}
1783
1784/*===========================================================================
1785 * FUNCTION   : deinitParameters
1786 *
1787 * DESCRIPTION: de-initialize camera parameters
1788 *
1789 * PARAMETERS :
1790 *
1791 * RETURN     : NONE
1792 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Undo initParameters(): the daemon-side mapping must be released before
    // the backing heap is freed, so the order below matters.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage; clear the now-dangling
    // pointer.
    mParameters = NULL;
}
1804
1805/*===========================================================================
1806 * FUNCTION   : calcMaxJpegSize
1807 *
1808 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1809 *
1810 * PARAMETERS :
1811 *
1812 * RETURN     : max_jpeg_size
1813 *==========================================================================*/
1814int QCamera3HardwareInterface::calcMaxJpegSize()
1815{
1816    int32_t max_jpeg_size = 0;
1817    int temp_width, temp_height;
1818    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1819        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1820        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1821        if (temp_width * temp_height > max_jpeg_size ) {
1822            max_jpeg_size = temp_width * temp_height;
1823        }
1824    }
1825    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1826    return max_jpeg_size;
1827}
1828
1829/*===========================================================================
1830 * FUNCTION   : initStaticMetadata
1831 *
1832 * DESCRIPTION: initialize the static metadata
1833 *
1834 * PARAMETERS :
1835 *   @cameraId  : camera Id
1836 *
1837 * RETURN     : int32_t type of status
1838 *              0  -- success
1839 *              non-zero failure code
1840 *==========================================================================*/
1841int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1842{
1843    int rc = 0;
1844    CameraMetadata staticInfo;
1845
1846    /* android.info: hardware level */
1847    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
1848    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1849        &supportedHardwareLevel, 1);
1850
1851    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1852    /*HAL 3 only*/
1853    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1854                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1855
1856    /*hard coded for now but this should come from sensor*/
1857    float min_focus_distance;
1858    if(facingBack){
1859        min_focus_distance = 10;
1860    } else {
1861        min_focus_distance = 0;
1862    }
1863    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1864                    &min_focus_distance, 1);
1865
1866    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1867                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1868
1869    /*should be using focal lengths but sensor doesn't provide that info now*/
1870    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1871                      &gCamCapability[cameraId]->focal_length,
1872                      1);
1873
1874    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1875                      gCamCapability[cameraId]->apertures,
1876                      gCamCapability[cameraId]->apertures_count);
1877
1878    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1879                gCamCapability[cameraId]->filter_densities,
1880                gCamCapability[cameraId]->filter_densities_count);
1881
1882
1883    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1884                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1885                      gCamCapability[cameraId]->optical_stab_modes_count);
1886
1887    staticInfo.update(ANDROID_LENS_POSITION,
1888                      gCamCapability[cameraId]->lens_position,
1889                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1890
1891    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1892                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1893    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1894                      lens_shading_map_size,
1895                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1896
1897    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1898                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1899    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1900            geo_correction_map_size,
1901            sizeof(geo_correction_map_size)/sizeof(int32_t));
1902
1903    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1904                       gCamCapability[cameraId]->geo_correction_map,
1905                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1906
1907    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1908            gCamCapability[cameraId]->sensor_physical_size, 2);
1909
1910    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1911            gCamCapability[cameraId]->exposure_time_range, 2);
1912
1913    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1914            &gCamCapability[cameraId]->max_frame_duration, 1);
1915
1916
1917    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1918                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1919
1920    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1921                                               gCamCapability[cameraId]->pixel_array_size.height};
1922    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1923                      pixel_array_size, 2);
1924
1925    int32_t active_array_size[] = {0, 0,
1926                                                gCamCapability[cameraId]->active_array_size.width,
1927                                                gCamCapability[cameraId]->active_array_size.height};
1928    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1929                      active_array_size, 4);
1930
1931    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1932            &gCamCapability[cameraId]->white_level, 1);
1933
1934    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1935            gCamCapability[cameraId]->black_level_pattern, 4);
1936
1937    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1938                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1939
1940    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1941                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1942
1943    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1944                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
1945
1946    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1947                      &gCamCapability[cameraId]->histogram_size, 1);
1948
1949    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1950            &gCamCapability[cameraId]->max_histogram_count, 1);
1951
1952    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1953                                                gCamCapability[cameraId]->sharpness_map_size.height};
1954
1955    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1956            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1957
1958    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1959            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1960
1961
1962    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1963                      &gCamCapability[cameraId]->raw_min_duration,
1964                       1);
1965
1966    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
1967                                                HAL_PIXEL_FORMAT_BLOB};
1968    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
1969    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1970                      scalar_formats,
1971                      scalar_formats_count);
1972
1973    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1974    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1975              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1976              available_processed_sizes);
1977    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1978                available_processed_sizes,
1979                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
1980
1981    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1982                      &gCamCapability[cameraId]->jpeg_min_duration[0],
1983                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
1984
1985    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1986    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1987                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1988                 available_fps_ranges);
1989    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1990            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1991
1992    camera_metadata_rational exposureCompensationStep = {
1993            gCamCapability[cameraId]->exp_compensation_step.numerator,
1994            gCamCapability[cameraId]->exp_compensation_step.denominator};
1995    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1996                      &exposureCompensationStep, 1);
1997
1998    /*TO DO*/
1999    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2000    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2001                      availableVstabModes, sizeof(availableVstabModes));
2002
2003    /*HAL 1 and HAL 3 common*/
2004    float maxZoom = 4;
2005    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2006            &maxZoom, 1);
2007
2008    int32_t max3aRegions = 1;
2009    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2010            &max3aRegions, 1);
2011
2012    uint8_t availableFaceDetectModes[] = {
2013            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2014            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2015    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2016                      availableFaceDetectModes,
2017                      sizeof(availableFaceDetectModes));
2018
2019    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2020                                       gCamCapability[cameraId]->raw_dim.height};
2021    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2022                      raw_size,
2023                      sizeof(raw_size)/sizeof(uint32_t));
2024
2025    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2026                                                        gCamCapability[cameraId]->exposure_compensation_max};
2027    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2028            exposureCompensationRange,
2029            sizeof(exposureCompensationRange)/sizeof(int32_t));
2030
2031    uint8_t lensFacing = (facingBack) ?
2032            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2033    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2034
2035    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2036                available_processed_sizes,
2037                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2038
2039    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2040                      available_thumbnail_sizes,
2041                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2042
2043    int32_t max_jpeg_size = 0;
2044    int temp_width, temp_height;
2045    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2046        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2047        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2048        if (temp_width * temp_height > max_jpeg_size ) {
2049            max_jpeg_size = temp_width * temp_height;
2050        }
2051    }
2052    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2053    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2054                      &max_jpeg_size, 1);
2055
2056    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2057    int32_t size = 0;
2058    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2059        int val = lookupFwkName(EFFECT_MODES_MAP,
2060                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2061                                   gCamCapability[cameraId]->supported_effects[i]);
2062        if (val != NAME_NOT_FOUND) {
2063            avail_effects[size] = (uint8_t)val;
2064            size++;
2065        }
2066    }
2067    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2068                      avail_effects,
2069                      size);
2070
2071    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2072    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2073    int32_t supported_scene_modes_cnt = 0;
2074    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2075        int val = lookupFwkName(SCENE_MODES_MAP,
2076                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2077                                gCamCapability[cameraId]->supported_scene_modes[i]);
2078        if (val != NAME_NOT_FOUND) {
2079            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2080            supported_indexes[supported_scene_modes_cnt] = i;
2081            supported_scene_modes_cnt++;
2082        }
2083    }
2084
2085    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2086                      avail_scene_modes,
2087                      supported_scene_modes_cnt);
2088
2089    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2090    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2091                      supported_scene_modes_cnt,
2092                      scene_mode_overrides,
2093                      supported_indexes,
2094                      cameraId);
2095    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2096                      scene_mode_overrides,
2097                      supported_scene_modes_cnt*3);
2098
2099    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2100    size = 0;
2101    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2102        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2103                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2104                                 gCamCapability[cameraId]->supported_antibandings[i]);
2105        if (val != NAME_NOT_FOUND) {
2106            avail_antibanding_modes[size] = (uint8_t)val;
2107            size++;
2108        }
2109
2110    }
2111    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2112                      avail_antibanding_modes,
2113                      size);
2114
2115    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2116    size = 0;
2117    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2118        int val = lookupFwkName(FOCUS_MODES_MAP,
2119                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2120                                gCamCapability[cameraId]->supported_focus_modes[i]);
2121        if (val != NAME_NOT_FOUND) {
2122            avail_af_modes[size] = (uint8_t)val;
2123            size++;
2124        }
2125    }
2126    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2127                      avail_af_modes,
2128                      size);
2129
2130    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2131    size = 0;
2132    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2133        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2134                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2135                                    gCamCapability[cameraId]->supported_white_balances[i]);
2136        if (val != NAME_NOT_FOUND) {
2137            avail_awb_modes[size] = (uint8_t)val;
2138            size++;
2139        }
2140    }
2141    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2142                      avail_awb_modes,
2143                      size);
2144
2145    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2146    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2147      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2148
2149    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2150            available_flash_levels,
2151            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2152
2153
2154    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2155    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2156            &flashAvailable, 1);
2157
2158    uint8_t avail_ae_modes[5];
2159    size = 0;
2160    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2161        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2162        size++;
2163    }
2164    if (flashAvailable) {
2165        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2166        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2167        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2168    }
2169    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2170                      avail_ae_modes,
2171                      size);
2172
2173    int32_t sensitivity_range[2];
2174    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2175    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2176    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2177                      sensitivity_range,
2178                      sizeof(sensitivity_range) / sizeof(int32_t));
2179
2180    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2181                      &gCamCapability[cameraId]->max_analog_sensitivity,
2182                      1);
2183
2184    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2185                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2186                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2187
2188    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2189    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2190                      &sensor_orientation,
2191                      1);
2192
2193    int32_t max_output_streams[3] = {1, 3, 1};
2194    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2195                      max_output_streams,
2196                      3);
2197
2198    gStaticMetadata[cameraId] = staticInfo.release();
2199    return rc;
2200}
2201
2202/*===========================================================================
2203 * FUNCTION   : makeTable
2204 *
2205 * DESCRIPTION: make a table of sizes
2206 *
2207 * PARAMETERS :
2208 *
2209 *
2210 *==========================================================================*/
2211void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2212                                          int32_t* sizeTable)
2213{
2214    int j = 0;
2215    for (int i = 0; i < size; i++) {
2216        sizeTable[j] = dimTable[i].width;
2217        sizeTable[j+1] = dimTable[i].height;
2218        j+=2;
2219    }
2220}
2221
2222/*===========================================================================
2223 * FUNCTION   : makeFPSTable
2224 *
2225 * DESCRIPTION: make a table of fps ranges
2226 *
2227 * PARAMETERS :
2228 *
2229 *==========================================================================*/
2230void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2231                                          int32_t* fpsRangesTable)
2232{
2233    int j = 0;
2234    for (int i = 0; i < size; i++) {
2235        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2236        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2237        j+=2;
2238    }
2239}
2240
2241/*===========================================================================
2242 * FUNCTION   : makeOverridesList
2243 *
2244 * DESCRIPTION: make a list of scene mode overrides
2245 *
2246 * PARAMETERS :
2247 *
2248 *
2249 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
                                                  uint8_t size, uint8_t* overridesList,
                                                  uint8_t* supported_indexes,
                                                  int camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // For each framework-supported scene mode, emit an (AE, AWB, AF) triple
    // into overridesList. supported_indexes[i] maps the i-th framework scene
    // mode back to its row in the daemon's full overridesTable, so
    // overridesList must hold 3 * size entries.
    int j = 0, index = 0, supt = 0;
    uint8_t focus_override;
    for (int i = 0; i < size; i++) {
        supt = 0;
        index = supported_indexes[i];
        // AE override: advertise auto-flash whenever the module has a flash,
        // regardless of what the daemon's table says.
        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the backend enum to the framework enum.
        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                                    overridesTable[index].awb_mode);
        // AF override: only pass the daemon's focus mode through if this
        // sensor actually supports it (checked against supported_focus_modes).
        focus_override = (uint8_t)overridesTable[index].af_mode;
        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = 1;
              break;
           }
        }
        if (supt) {
           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                              focus_override);
        } else {
           // Unsupported focus mode: fall back to AF OFF.
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
2284
2285/*===========================================================================
2286 * FUNCTION   : getPreviewHalPixelFormat
2287 *
2288 * DESCRIPTION: convert the format to type recognized by framework
2289 *
2290 * PARAMETERS : format : the format from backend
2291 *
2292 ** RETURN    : format recognized by framework
2293 *
2294 *==========================================================================*/
2295int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2296{
2297    int32_t halPixelFormat;
2298
2299    switch (format) {
2300    case CAM_FORMAT_YUV_420_NV12:
2301        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2302        break;
2303    case CAM_FORMAT_YUV_420_NV21:
2304        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2305        break;
2306    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2307        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2308        break;
2309    case CAM_FORMAT_YUV_420_YV12:
2310        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2311        break;
2312    case CAM_FORMAT_YUV_422_NV16:
2313    case CAM_FORMAT_YUV_422_NV61:
2314    default:
2315        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2316        break;
2317    }
2318    return halPixelFormat;
2319}
2320
2321/*===========================================================================
2322 * FUNCTION   : getSensorSensitivity
2323 *
2324 * DESCRIPTION: convert iso_mode to an integer value
2325 *
2326 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2327 *
2328 ** RETURN    : sensitivity supported by sensor
2329 *
2330 *==========================================================================*/
2331int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2332{
2333    int32_t sensitivity;
2334
2335    switch (iso_mode) {
2336    case CAM_ISO_MODE_100:
2337        sensitivity = 100;
2338        break;
2339    case CAM_ISO_MODE_200:
2340        sensitivity = 200;
2341        break;
2342    case CAM_ISO_MODE_400:
2343        sensitivity = 400;
2344        break;
2345    case CAM_ISO_MODE_800:
2346        sensitivity = 800;
2347        break;
2348    case CAM_ISO_MODE_1600:
2349        sensitivity = 1600;
2350        break;
2351    default:
2352        sensitivity = -1;
2353        break;
2354    }
2355    return sensitivity;
2356}
2357
2358
2359/*===========================================================================
2360 * FUNCTION   : AddSetParmEntryToBatch
2361 *
2362 * DESCRIPTION: add set parameter entry into batch
2363 *
2364 * PARAMETERS :
2365 *   @p_table     : ptr to parameter buffer
2366 *   @paramType   : parameter type
2367 *   @paramLength : length of parameter value
2368 *   @paramValue  : ptr to parameter value
2369 *
2370 * RETURN     : int32_t type of status
2371 *              NO_ERROR  -- success
2372 *              none-zero failure code
2373 *==========================================================================*/
2374int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2375                                                          cam_intf_parm_type_t paramType,
2376                                                          uint32_t paramLength,
2377                                                          void *paramValue)
2378{
2379    int position = paramType;
2380    int current, next;
2381
2382    /*************************************************************************
2383    *                 Code to take care of linking next flags                *
2384    *************************************************************************/
2385    current = GET_FIRST_PARAM_ID(p_table);
2386    if (position == current){
2387        //DO NOTHING
2388    } else if (position < current){
2389        SET_NEXT_PARAM_ID(position, p_table, current);
2390        SET_FIRST_PARAM_ID(p_table, position);
2391    } else {
2392        /* Search for the position in the linked list where we need to slot in*/
2393        while (position > GET_NEXT_PARAM_ID(current, p_table))
2394            current = GET_NEXT_PARAM_ID(current, p_table);
2395
2396        /*If node already exists no need to alter linking*/
2397        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2398            next = GET_NEXT_PARAM_ID(current, p_table);
2399            SET_NEXT_PARAM_ID(current, p_table, position);
2400            SET_NEXT_PARAM_ID(position, p_table, next);
2401        }
2402    }
2403
2404    /*************************************************************************
2405    *                   Copy contents into entry                             *
2406    *************************************************************************/
2407
2408    if (paramLength > sizeof(parm_type_t)) {
2409        ALOGE("%s:Size of input larger than max entry size",__func__);
2410        return BAD_VALUE;
2411    }
2412    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2413    return NO_ERROR;
2414}
2415
2416/*===========================================================================
2417 * FUNCTION   : lookupFwkName
2418 *
2419 * DESCRIPTION: In case the enum is not same in fwk and backend
2420 *              make sure the parameter is correctly propogated
2421 *
2422 * PARAMETERS  :
2423 *   @arr      : map between the two enums
2424 *   @len      : len of the map
2425 *   @hal_name : name of the hal_parm to map
2426 *
2427 * RETURN     : int type of status
2428 *              fwk_name  -- success
2429 *              none-zero failure code
2430 *==========================================================================*/
2431int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2432                                             int len, int hal_name)
2433{
2434
2435    for (int i = 0; i < len; i++) {
2436        if (arr[i].hal_name == hal_name)
2437            return arr[i].fwk_name;
2438    }
2439
2440    /* Not able to find matching framework type is not necessarily
2441     * an error case. This happens when mm-camera supports more attributes
2442     * than the frameworks do */
2443    ALOGD("%s: Cannot find matching framework type", __func__);
2444    return NAME_NOT_FOUND;
2445}
2446
2447/*===========================================================================
2448 * FUNCTION   : lookupHalName
2449 *
2450 * DESCRIPTION: In case the enum is not same in fwk and backend
2451 *              make sure the parameter is correctly propogated
2452 *
2453 * PARAMETERS  :
2454 *   @arr      : map between the two enums
2455 *   @len      : len of the map
2456 *   @fwk_name : name of the hal_parm to map
2457 *
2458 * RETURN     : int32_t type of status
2459 *              hal_name  -- success
2460 *              none-zero failure code
2461 *==========================================================================*/
2462int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2463                                             int len, int fwk_name)
2464{
2465    for (int i = 0; i < len; i++) {
2466       if (arr[i].fwk_name == fwk_name)
2467           return arr[i].hal_name;
2468    }
2469    ALOGE("%s: Cannot find matching hal type", __func__);
2470    return NAME_NOT_FOUND;
2471}
2472
2473/*===========================================================================
2474 * FUNCTION   : getCapabilities
2475 *
2476 * DESCRIPTION: query camera capabilities
2477 *
2478 * PARAMETERS :
2479 *   @cameraId  : camera Id
2480 *   @info      : camera info struct to be filled in with camera capabilities
2481 *
2482 * RETURN     : int32_t type of status
2483 *              NO_ERROR  -- success
2484 *              none-zero failure code
2485 *==========================================================================*/
2486int QCamera3HardwareInterface::getCamInfo(int cameraId,
2487                                    struct camera_info *info)
2488{
2489    int rc = 0;
2490
2491    if (NULL == gCamCapability[cameraId]) {
2492        rc = initCapabilities(cameraId);
2493        if (rc < 0) {
2494            //pthread_mutex_unlock(&g_camlock);
2495            return rc;
2496        }
2497    }
2498
2499    if (NULL == gStaticMetadata[cameraId]) {
2500        rc = initStaticMetadata(cameraId);
2501        if (rc < 0) {
2502            return rc;
2503        }
2504    }
2505
2506    switch(gCamCapability[cameraId]->position) {
2507    case CAM_POSITION_BACK:
2508        info->facing = CAMERA_FACING_BACK;
2509        break;
2510
2511    case CAM_POSITION_FRONT:
2512        info->facing = CAMERA_FACING_FRONT;
2513        break;
2514
2515    default:
2516        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2517        rc = -1;
2518        break;
2519    }
2520
2521
2522    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2523    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2524    info->static_camera_characteristics = gStaticMetadata[cameraId];
2525
2526    return rc;
2527}
2528
2529/*===========================================================================
2530 * FUNCTION   : translateMetadata
2531 *
2532 * DESCRIPTION: translate the metadata into camera_metadata_t
2533 *
2534 * PARAMETERS : type of the request
2535 *
2536 *
2537 * RETURN     : success: camera_metadata_t*
2538 *              failure: NULL
2539 *
2540 *==========================================================================*/
2541camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2542{
2543    pthread_mutex_lock(&mMutex);
2544
2545    if (mDefaultMetadata[type] != NULL) {
2546        pthread_mutex_unlock(&mMutex);
2547        return mDefaultMetadata[type];
2548    }
2549    //first time we are handling this request
2550    //fill up the metadata structure using the wrapper class
2551    CameraMetadata settings;
2552    //translate from cam_capability_t to camera_metadata_tag_t
2553    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2554    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2555
2556    /*control*/
2557
2558    uint8_t controlIntent = 0;
2559    switch (type) {
2560      case CAMERA3_TEMPLATE_PREVIEW:
2561        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2562        break;
2563      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2564        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2565        break;
2566      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2567        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2568        break;
2569      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2570        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2571        break;
2572      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2573        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2574        break;
2575      default:
2576        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2577        break;
2578    }
2579    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2580
2581    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2582            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2583
2584    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2585    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2586
2587    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2588    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2589
2590    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2591    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2592
2593    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2594    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2595
2596    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2597    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2598
2599    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2600    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2601
2602    static uint8_t focusMode;
2603    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2604        ALOGE("%s: Setting focus mode to auto", __func__);
2605        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2606    } else {
2607        ALOGE("%s: Setting focus mode to off", __func__);
2608        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2609    }
2610    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2611
2612    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2613    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2614
2615    /*flash*/
2616    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2617    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2618
2619    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2620    settings.update(ANDROID_FLASH_FIRING_POWER,
2621            &flashFiringLevel, 1);
2622
2623    /* lens */
2624    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2625    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2626
2627    if (gCamCapability[mCameraId]->filter_densities_count) {
2628        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2629        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2630                        gCamCapability[mCameraId]->filter_densities_count);
2631    }
2632
2633    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2634    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2635
2636    /* frame duration */
2637    int64_t default_frame_duration = NSEC_PER_33MSEC;
2638    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2639
2640    /* sensitivity */
2641    int32_t default_sensitivity = 100;
2642    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2643
2644    mDefaultMetadata[type] = settings.release();
2645
2646    pthread_mutex_unlock(&mMutex);
2647    return mDefaultMetadata[type];
2648}
2649
2650/*===========================================================================
2651 * FUNCTION   : setFrameParameters
2652 *
2653 * DESCRIPTION: set parameters per frame as requested in the metadata from
2654 *              framework
2655 *
2656 * PARAMETERS :
2657 *   @request   : request that needs to be serviced
2658 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2659 *
2660 * RETURN     : success: NO_ERROR
2661 *              failure:
2662 *==========================================================================*/
2663int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2664                    uint32_t streamTypeMask)
2665{
2666    /*translate from camera_metadata_t type to parm_type_t*/
2667    int rc = 0;
2668    if (request->settings == NULL && mFirstRequest) {
2669        /*settings cannot be null for the first request*/
2670        return BAD_VALUE;
2671    }
2672
2673    int32_t hal_version = CAM_HAL_V3;
2674
2675    memset(mParameters, 0, sizeof(parm_buffer_t));
2676    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2677    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2678                sizeof(hal_version), &hal_version);
2679    if (rc < 0) {
2680        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2681        return BAD_VALUE;
2682    }
2683
2684    /*we need to update the frame number in the parameters*/
2685    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2686                                sizeof(request->frame_number), &(request->frame_number));
2687    if (rc < 0) {
2688        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2689        return BAD_VALUE;
2690    }
2691
2692    /* Update stream id mask where buffers are requested */
2693    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2694                                sizeof(streamTypeMask), &streamTypeMask);
2695    if (rc < 0) {
2696        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2697        return BAD_VALUE;
2698    }
2699
2700    if(request->settings != NULL){
2701        rc = translateMetadataToParameters(request);
2702    }
2703    /*set the parameters to backend*/
2704    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2705    return rc;
2706}
2707
2708/*===========================================================================
2709 * FUNCTION   : translateMetadataToParameters
2710 *
2711 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2712 *
2713 *
2714 * PARAMETERS :
2715 *   @request  : request sent from framework
2716 *
2717 *
2718 * RETURN     : success: NO_ERROR
2719 *              failure:
2720 *==========================================================================*/
2721int QCamera3HardwareInterface::translateMetadataToParameters
2722                                  (const camera3_capture_request_t *request)
2723{
2724    int rc = 0;
2725    CameraMetadata frame_settings;
2726    frame_settings = request->settings;
2727
2728    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2729        int32_t antibandingMode =
2730            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2731        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2732                sizeof(antibandingMode), &antibandingMode);
2733    }
2734
2735    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2736        int32_t expCompensation = frame_settings.find(
2737            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2738        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2739            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2740        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2741            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2742        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2743          sizeof(expCompensation), &expCompensation);
2744    }
2745
2746    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2747        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2748        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2749                sizeof(aeLock), &aeLock);
2750    }
2751    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2752        cam_fps_range_t fps_range;
2753        fps_range.min_fps =
2754            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2755        fps_range.max_fps =
2756            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2757        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2758                sizeof(fps_range), &fps_range);
2759    }
2760
2761    float focalDistance = -1.0;
2762    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2763        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2764        rc = AddSetParmEntryToBatch(mParameters,
2765                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2766                sizeof(focalDistance), &focalDistance);
2767    }
2768
2769    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2770        uint8_t fwk_focusMode =
2771            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2772        uint8_t focusMode;
2773        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2774            focusMode = CAM_FOCUS_MODE_INFINITY;
2775        } else{
2776         focusMode = lookupHalName(FOCUS_MODES_MAP,
2777                                   sizeof(FOCUS_MODES_MAP),
2778                                   fwk_focusMode);
2779        }
2780        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2781                sizeof(focusMode), &focusMode);
2782    }
2783
2784    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2785        uint8_t awbLock =
2786            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2787        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2788                sizeof(awbLock), &awbLock);
2789    }
2790
2791    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2792        uint8_t fwk_whiteLevel =
2793            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2794        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2795                sizeof(WHITE_BALANCE_MODES_MAP),
2796                fwk_whiteLevel);
2797        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2798                sizeof(whiteLevel), &whiteLevel);
2799    }
2800
2801    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2802        uint8_t fwk_effectMode =
2803            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2804        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2805                sizeof(EFFECT_MODES_MAP),
2806                fwk_effectMode);
2807        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2808                sizeof(effectMode), &effectMode);
2809    }
2810
2811    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2812        uint8_t fwk_aeMode =
2813            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2814        uint8_t aeMode;
2815        int32_t redeye;
2816
2817        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2818            aeMode = CAM_AE_MODE_OFF;
2819        } else {
2820            aeMode = CAM_AE_MODE_ON;
2821        }
2822        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2823            redeye = 1;
2824        } else {
2825            redeye = 0;
2826        }
2827
2828        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2829                                          sizeof(AE_FLASH_MODE_MAP),
2830                                          fwk_aeMode);
2831        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2832                sizeof(aeMode), &aeMode);
2833        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2834                sizeof(flashMode), &flashMode);
2835        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2836                sizeof(redeye), &redeye);
2837    }
2838
2839    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2840        uint8_t colorCorrectMode =
2841            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2842        rc =
2843            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2844                    sizeof(colorCorrectMode), &colorCorrectMode);
2845    }
2846
2847    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
2848        cam_color_correct_gains_t colorCorrectGains;
2849        for (int i = 0; i < 4; i++) {
2850            colorCorrectGains.gains[i] =
2851                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
2852        }
2853        rc =
2854            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
2855                    sizeof(colorCorrectGains), &colorCorrectGains);
2856    }
2857
2858    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
2859        cam_color_correct_matrix_t colorCorrectTransform;
2860        cam_rational_type_t transform_elem;
2861        int num = 0;
2862        for (int i = 0; i < 3; i++) {
2863           for (int j = 0; j < 3; j++) {
2864              transform_elem.numerator =
2865                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
2866              transform_elem.denominator =
2867                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
2868              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
2869              num++;
2870           }
2871        }
2872        rc =
2873            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
2874                    sizeof(colorCorrectTransform), &colorCorrectTransform);
2875    }
2876
2877    cam_trigger_t aecTrigger;
2878    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2879    aecTrigger.trigger_id = -1;
2880    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2881        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2882        aecTrigger.trigger =
2883            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2884        aecTrigger.trigger_id =
2885            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2886    }
2887    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2888                                sizeof(aecTrigger), &aecTrigger);
2889
2890    /*af_trigger must come with a trigger id*/
2891    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2892        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2893        cam_trigger_t af_trigger;
2894        af_trigger.trigger =
2895            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2896        af_trigger.trigger_id =
2897            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2898        rc = AddSetParmEntryToBatch(mParameters,
2899                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2900    }
2901
2902    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2903        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2904        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2905                sizeof(metaMode), &metaMode);
2906        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2907           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2908           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2909                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2910                                             fwk_sceneMode);
2911           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2912                sizeof(sceneMode), &sceneMode);
2913        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
2914           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2915           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2916                sizeof(sceneMode), &sceneMode);
2917        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
2918           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2919           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2920                sizeof(sceneMode), &sceneMode);
2921        }
2922    }
2923
2924    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2925        int32_t demosaic =
2926            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2927        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2928                sizeof(demosaic), &demosaic);
2929    }
2930
2931    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2932        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2933        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
2934                sizeof(edgeMode), &edgeMode);
2935    }
2936
2937    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2938        int32_t edgeStrength =
2939            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2940        rc = AddSetParmEntryToBatch(mParameters,
2941                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2942    }
2943
2944    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2945        int32_t respectFlashMode = 1;
2946        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2947            uint8_t fwk_aeMode =
2948                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2949            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
2950                respectFlashMode = 0;
2951                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
2952                    __func__);
2953            }
2954        }
2955        if (respectFlashMode) {
2956            uint8_t flashMode =
2957                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2958            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
2959                                          sizeof(FLASH_MODES_MAP),
2960                                          flashMode);
2961            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
2962            // To check: CAM_INTF_META_FLASH_MODE usage
2963            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2964                          sizeof(flashMode), &flashMode);
2965        }
2966    }
2967
2968    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2969        uint8_t flashPower =
2970            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2971        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2972                sizeof(flashPower), &flashPower);
2973    }
2974
2975    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2976        int64_t flashFiringTime =
2977            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2978        rc = AddSetParmEntryToBatch(mParameters,
2979                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2980    }
2981
2982    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2983        uint8_t geometricMode =
2984            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2985        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2986                sizeof(geometricMode), &geometricMode);
2987    }
2988
2989    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2990        uint8_t geometricStrength =
2991            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2992        rc = AddSetParmEntryToBatch(mParameters,
2993                CAM_INTF_META_GEOMETRIC_STRENGTH,
2994                sizeof(geometricStrength), &geometricStrength);
2995    }
2996
2997    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2998        uint8_t hotPixelMode =
2999            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3000        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3001                sizeof(hotPixelMode), &hotPixelMode);
3002    }
3003
3004    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3005        float lensAperture =
3006            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3007        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3008                sizeof(lensAperture), &lensAperture);
3009    }
3010
3011    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3012        float filterDensity =
3013            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3014        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3015                sizeof(filterDensity), &filterDensity);
3016    }
3017
3018    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3019        float focalLength =
3020            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3021        rc = AddSetParmEntryToBatch(mParameters,
3022                CAM_INTF_META_LENS_FOCAL_LENGTH,
3023                sizeof(focalLength), &focalLength);
3024    }
3025
3026    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3027        uint8_t optStabMode =
3028            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3029        rc = AddSetParmEntryToBatch(mParameters,
3030                CAM_INTF_META_LENS_OPT_STAB_MODE,
3031                sizeof(optStabMode), &optStabMode);
3032    }
3033
3034    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3035        uint8_t noiseRedMode =
3036            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3037        rc = AddSetParmEntryToBatch(mParameters,
3038                CAM_INTF_META_NOISE_REDUCTION_MODE,
3039                sizeof(noiseRedMode), &noiseRedMode);
3040    }
3041
3042    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3043        uint8_t noiseRedStrength =
3044            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3045        rc = AddSetParmEntryToBatch(mParameters,
3046                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3047                sizeof(noiseRedStrength), &noiseRedStrength);
3048    }
3049
3050    cam_crop_region_t scalerCropRegion;
3051    bool scalerCropSet = false;
3052    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3053        scalerCropRegion.left =
3054            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3055        scalerCropRegion.top =
3056            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3057        scalerCropRegion.width =
3058            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3059        scalerCropRegion.height =
3060            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3061        rc = AddSetParmEntryToBatch(mParameters,
3062                CAM_INTF_META_SCALER_CROP_REGION,
3063                sizeof(scalerCropRegion), &scalerCropRegion);
3064        scalerCropSet = true;
3065    }
3066
3067    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3068        int64_t sensorExpTime =
3069            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3070        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3071        rc = AddSetParmEntryToBatch(mParameters,
3072                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3073                sizeof(sensorExpTime), &sensorExpTime);
3074    }
3075
3076    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3077        int64_t sensorFrameDuration =
3078            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3079        int64_t minFrameDuration = getMinFrameDuration(request);
3080        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3081        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3082            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3083        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3084        rc = AddSetParmEntryToBatch(mParameters,
3085                CAM_INTF_META_SENSOR_FRAME_DURATION,
3086                sizeof(sensorFrameDuration), &sensorFrameDuration);
3087    }
3088
3089    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3090        int32_t sensorSensitivity =
3091            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3092        if (sensorSensitivity <
3093                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3094            sensorSensitivity =
3095                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3096        if (sensorSensitivity >
3097                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3098            sensorSensitivity =
3099                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3100        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3101        rc = AddSetParmEntryToBatch(mParameters,
3102                CAM_INTF_META_SENSOR_SENSITIVITY,
3103                sizeof(sensorSensitivity), &sensorSensitivity);
3104    }
3105
3106    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3107        int32_t shadingMode =
3108            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3109        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3110                sizeof(shadingMode), &shadingMode);
3111    }
3112
3113    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3114        uint8_t shadingStrength =
3115            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3116        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3117                sizeof(shadingStrength), &shadingStrength);
3118    }
3119
3120    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3121        uint8_t fwk_facedetectMode =
3122            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3123        uint8_t facedetectMode =
3124            lookupHalName(FACEDETECT_MODES_MAP,
3125                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3126        rc = AddSetParmEntryToBatch(mParameters,
3127                CAM_INTF_META_STATS_FACEDETECT_MODE,
3128                sizeof(facedetectMode), &facedetectMode);
3129    }
3130
3131    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3132        uint8_t histogramMode =
3133            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3134        rc = AddSetParmEntryToBatch(mParameters,
3135                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3136                sizeof(histogramMode), &histogramMode);
3137    }
3138
3139    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3140        uint8_t sharpnessMapMode =
3141            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3142        rc = AddSetParmEntryToBatch(mParameters,
3143                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3144                sizeof(sharpnessMapMode), &sharpnessMapMode);
3145    }
3146
3147    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3148        uint8_t tonemapMode =
3149            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3150        rc = AddSetParmEntryToBatch(mParameters,
3151                CAM_INTF_META_TONEMAP_MODE,
3152                sizeof(tonemapMode), &tonemapMode);
3153    }
3154    int point = 0;
3155    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) {
3156        cam_tonemap_curve_t tonemapCurveBlue;
3157        tonemapCurveBlue.tonemap_points_cnt =
3158           gCamCapability[mCameraId]->max_tone_map_curve_points;
3159        for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) {
3160            for (int j = 0; j < 2; j++) {
3161               tonemapCurveBlue.tonemap_points[i][j] =
3162                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3163               point++;
3164            }
3165        }
3166        rc = AddSetParmEntryToBatch(mParameters,
3167                CAM_INTF_META_TONEMAP_CURVE_BLUE,
3168                sizeof(tonemapCurveBlue), &tonemapCurveBlue);
3169    }
3170    point = 0;
3171    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) {
3172        cam_tonemap_curve_t tonemapCurveGreen;
3173        tonemapCurveGreen.tonemap_points_cnt =
3174           gCamCapability[mCameraId]->max_tone_map_curve_points;
3175        for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) {
3176            for (int j = 0; j < 2; j++) {
3177               tonemapCurveGreen.tonemap_points[i][j] =
3178                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3179               point++;
3180            }
3181        }
3182        rc = AddSetParmEntryToBatch(mParameters,
3183                CAM_INTF_META_TONEMAP_CURVE_GREEN,
3184                sizeof(tonemapCurveGreen), &tonemapCurveGreen);
3185    }
3186    point = 0;
3187    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3188        cam_tonemap_curve_t tonemapCurveRed;
3189        tonemapCurveRed.tonemap_points_cnt =
3190           gCamCapability[mCameraId]->max_tone_map_curve_points;
3191        for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) {
3192            for (int j = 0; j < 2; j++) {
3193               tonemapCurveRed.tonemap_points[i][j] =
3194                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3195               point++;
3196            }
3197        }
3198        rc = AddSetParmEntryToBatch(mParameters,
3199                CAM_INTF_META_TONEMAP_CURVE_RED,
3200                sizeof(tonemapCurveRed), &tonemapCurveRed);
3201    }
3202
3203    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3204        uint8_t captureIntent =
3205            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3206        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3207                sizeof(captureIntent), &captureIntent);
3208    }
3209
3210    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3211        uint8_t blackLevelLock =
3212            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3213        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3214                sizeof(blackLevelLock), &blackLevelLock);
3215    }
3216
3217    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3218        uint8_t lensShadingMapMode =
3219            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3220        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3221                sizeof(lensShadingMapMode), &lensShadingMapMode);
3222    }
3223
3224    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3225        cam_area_t roi;
3226        bool reset = true;
3227        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3228        if (scalerCropSet) {
3229            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3230        }
3231        if (reset) {
3232            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3233                    sizeof(roi), &roi);
3234        }
3235    }
3236
3237    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3238        cam_area_t roi;
3239        bool reset = true;
3240        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3241        if (scalerCropSet) {
3242            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3243        }
3244        if (reset) {
3245            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3246                    sizeof(roi), &roi);
3247        }
3248    }
3249
3250    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3251        cam_area_t roi;
3252        bool reset = true;
3253        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3254        if (scalerCropSet) {
3255            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3256        }
3257        if (reset) {
3258            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3259                    sizeof(roi), &roi);
3260        }
3261    }
3262    return rc;
3263}
3264
3265/*===========================================================================
3266 * FUNCTION   : getJpegSettings
3267 *
3268 * DESCRIPTION: save the jpeg settings in the HAL
3269 *
3270 *
3271 * PARAMETERS :
3272 *   @settings  : frame settings information from framework
3273 *
3274 *
3275 * RETURN     : success: NO_ERROR
3276 *              failure:
3277 *==========================================================================*/
3278int QCamera3HardwareInterface::getJpegSettings
3279                                  (const camera_metadata_t *settings)
3280{
3281    if (mJpegSettings) {
3282        if (mJpegSettings->gps_timestamp) {
3283            free(mJpegSettings->gps_timestamp);
3284            mJpegSettings->gps_timestamp = NULL;
3285        }
3286        if (mJpegSettings->gps_coordinates) {
3287            for (int i = 0; i < 3; i++) {
3288                free(mJpegSettings->gps_coordinates[i]);
3289                mJpegSettings->gps_coordinates[i] = NULL;
3290            }
3291        }
3292        free(mJpegSettings);
3293        mJpegSettings = NULL;
3294    }
3295    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3296    CameraMetadata jpeg_settings;
3297    jpeg_settings = settings;
3298
3299    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3300        mJpegSettings->jpeg_orientation =
3301            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3302    } else {
3303        mJpegSettings->jpeg_orientation = 0;
3304    }
3305    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3306        mJpegSettings->jpeg_quality =
3307            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3308    } else {
3309        mJpegSettings->jpeg_quality = 85;
3310    }
3311    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3312        mJpegSettings->thumbnail_size.width =
3313            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3314        mJpegSettings->thumbnail_size.height =
3315            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3316    } else {
3317        mJpegSettings->thumbnail_size.width = 0;
3318        mJpegSettings->thumbnail_size.height = 0;
3319    }
3320    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3321        for (int i = 0; i < 3; i++) {
3322            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3323            *(mJpegSettings->gps_coordinates[i]) =
3324                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3325        }
3326    } else{
3327       for (int i = 0; i < 3; i++) {
3328            mJpegSettings->gps_coordinates[i] = NULL;
3329        }
3330    }
3331
3332    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3333        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3334        *(mJpegSettings->gps_timestamp) =
3335            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3336    } else {
3337        mJpegSettings->gps_timestamp = NULL;
3338    }
3339
3340    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3341        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3342        for (int i = 0; i < len; i++) {
3343            mJpegSettings->gps_processing_method[i] =
3344                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3345        }
3346        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3347            mJpegSettings->gps_processing_method[len] = '\0';
3348        }
3349    } else {
3350        mJpegSettings->gps_processing_method[0] = '\0';
3351    }
3352
3353    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3354        mJpegSettings->sensor_sensitivity =
3355            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3356    } else {
3357        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3358    }
3359
3360    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3361
3362    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3363        mJpegSettings->lens_focal_length =
3364            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3365    }
3366    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3367        mJpegSettings->exposure_compensation =
3368            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3369    }
3370    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3371    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3372    mJpegSettings->is_jpeg_format = true;
3373    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3374    return 0;
3375}
3376
3377/*===========================================================================
3378 * FUNCTION   : captureResultCb
3379 *
3380 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3381 *
3382 * PARAMETERS :
3383 *   @frame  : frame information from mm-camera-interface
3384 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3385 *   @userdata: userdata
3386 *
3387 * RETURN     : NONE
3388 *==========================================================================*/
3389void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3390                camera3_stream_buffer_t *buffer,
3391                uint32_t frame_number, void *userdata)
3392{
3393    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3394    if (hw == NULL) {
3395        ALOGE("%s: Invalid hw %p", __func__, hw);
3396        return;
3397    }
3398
3399    hw->captureResultCb(metadata, buffer, frame_number);
3400    return;
3401}
3402
3403
3404/*===========================================================================
3405 * FUNCTION   : initialize
3406 *
3407 * DESCRIPTION: Pass framework callback pointers to HAL
3408 *
3409 * PARAMETERS :
3410 *
3411 *
3412 * RETURN     : Success : 0
3413 *              Failure: -ENODEV
3414 *==========================================================================*/
3415
3416int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3417                                  const camera3_callback_ops_t *callback_ops)
3418{
3419    ALOGV("%s: E", __func__);
3420    QCamera3HardwareInterface *hw =
3421        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3422    if (!hw) {
3423        ALOGE("%s: NULL camera device", __func__);
3424        return -ENODEV;
3425    }
3426
3427    int rc = hw->initialize(callback_ops);
3428    ALOGV("%s: X", __func__);
3429    return rc;
3430}
3431
3432/*===========================================================================
3433 * FUNCTION   : configure_streams
3434 *
3435 * DESCRIPTION:
3436 *
3437 * PARAMETERS :
3438 *
3439 *
3440 * RETURN     : Success: 0
3441 *              Failure: -EINVAL (if stream configuration is invalid)
3442 *                       -ENODEV (fatal error)
3443 *==========================================================================*/
3444
3445int QCamera3HardwareInterface::configure_streams(
3446        const struct camera3_device *device,
3447        camera3_stream_configuration_t *stream_list)
3448{
3449    ALOGV("%s: E", __func__);
3450    QCamera3HardwareInterface *hw =
3451        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3452    if (!hw) {
3453        ALOGE("%s: NULL camera device", __func__);
3454        return -ENODEV;
3455    }
3456    int rc = hw->configureStreams(stream_list);
3457    ALOGV("%s: X", __func__);
3458    return rc;
3459}
3460
3461/*===========================================================================
3462 * FUNCTION   : register_stream_buffers
3463 *
3464 * DESCRIPTION: Register stream buffers with the device
3465 *
3466 * PARAMETERS :
3467 *
3468 * RETURN     :
3469 *==========================================================================*/
3470int QCamera3HardwareInterface::register_stream_buffers(
3471        const struct camera3_device *device,
3472        const camera3_stream_buffer_set_t *buffer_set)
3473{
3474    ALOGV("%s: E", __func__);
3475    QCamera3HardwareInterface *hw =
3476        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3477    if (!hw) {
3478        ALOGE("%s: NULL camera device", __func__);
3479        return -ENODEV;
3480    }
3481    int rc = hw->registerStreamBuffers(buffer_set);
3482    ALOGV("%s: X", __func__);
3483    return rc;
3484}
3485
3486/*===========================================================================
3487 * FUNCTION   : construct_default_request_settings
3488 *
3489 * DESCRIPTION: Configure a settings buffer to meet the required use case
3490 *
3491 * PARAMETERS :
3492 *
3493 *
3494 * RETURN     : Success: Return valid metadata
3495 *              Failure: Return NULL
3496 *==========================================================================*/
3497const camera_metadata_t* QCamera3HardwareInterface::
3498    construct_default_request_settings(const struct camera3_device *device,
3499                                        int type)
3500{
3501
3502    ALOGV("%s: E", __func__);
3503    camera_metadata_t* fwk_metadata = NULL;
3504    QCamera3HardwareInterface *hw =
3505        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3506    if (!hw) {
3507        ALOGE("%s: NULL camera device", __func__);
3508        return NULL;
3509    }
3510
3511    fwk_metadata = hw->translateCapabilityToMetadata(type);
3512
3513    ALOGV("%s: X", __func__);
3514    return fwk_metadata;
3515}
3516
3517/*===========================================================================
3518 * FUNCTION   : process_capture_request
3519 *
3520 * DESCRIPTION:
3521 *
3522 * PARAMETERS :
3523 *
3524 *
3525 * RETURN     :
3526 *==========================================================================*/
3527int QCamera3HardwareInterface::process_capture_request(
3528                    const struct camera3_device *device,
3529                    camera3_capture_request_t *request)
3530{
3531    ALOGV("%s: E", __func__);
3532    QCamera3HardwareInterface *hw =
3533        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3534    if (!hw) {
3535        ALOGE("%s: NULL camera device", __func__);
3536        return -EINVAL;
3537    }
3538
3539    int rc = hw->processCaptureRequest(request);
3540    ALOGV("%s: X", __func__);
3541    return rc;
3542}
3543
3544/*===========================================================================
3545 * FUNCTION   : get_metadata_vendor_tag_ops
3546 *
3547 * DESCRIPTION:
3548 *
3549 * PARAMETERS :
3550 *
3551 *
3552 * RETURN     :
3553 *==========================================================================*/
3554
3555void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3556                const struct camera3_device *device,
3557                vendor_tag_query_ops_t* ops)
3558{
3559    ALOGV("%s: E", __func__);
3560    QCamera3HardwareInterface *hw =
3561        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3562    if (!hw) {
3563        ALOGE("%s: NULL camera device", __func__);
3564        return;
3565    }
3566
3567    hw->getMetadataVendorTagOps(ops);
3568    ALOGV("%s: X", __func__);
3569    return;
3570}
3571
3572/*===========================================================================
3573 * FUNCTION   : dump
3574 *
3575 * DESCRIPTION:
3576 *
3577 * PARAMETERS :
3578 *
3579 *
3580 * RETURN     :
3581 *==========================================================================*/
3582
3583void QCamera3HardwareInterface::dump(
3584                const struct camera3_device *device, int fd)
3585{
3586    ALOGV("%s: E", __func__);
3587    QCamera3HardwareInterface *hw =
3588        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3589    if (!hw) {
3590        ALOGE("%s: NULL camera device", __func__);
3591        return;
3592    }
3593
3594    hw->dump(fd);
3595    ALOGV("%s: X", __func__);
3596    return;
3597}
3598
3599/*===========================================================================
3600 * FUNCTION   : close_camera_device
3601 *
3602 * DESCRIPTION:
3603 *
3604 * PARAMETERS :
3605 *
3606 *
3607 * RETURN     :
3608 *==========================================================================*/
3609int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3610{
3611    ALOGV("%s: E", __func__);
3612    int ret = NO_ERROR;
3613    QCamera3HardwareInterface *hw =
3614        reinterpret_cast<QCamera3HardwareInterface *>(
3615            reinterpret_cast<camera3_device_t *>(device)->priv);
3616    if (!hw) {
3617        ALOGE("NULL camera device");
3618        return BAD_VALUE;
3619    }
3620    delete hw;
3621
3622    pthread_mutex_lock(&mCameraSessionLock);
3623    mCameraSessionActive = 0;
3624    pthread_mutex_unlock(&mCameraSessionLock);
3625    ALOGV("%s: X", __func__);
3626    return ret;
3627}
3628
3629/*===========================================================================
3630 * FUNCTION   : getWaveletDenoiseProcessPlate
3631 *
3632 * DESCRIPTION: query wavelet denoise process plate
3633 *
3634 * PARAMETERS : None
3635 *
3636 * RETURN     : WNR process plate value
3637 *==========================================================================*/
3638cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3639{
3640    char prop[PROPERTY_VALUE_MAX];
3641    memset(prop, 0, sizeof(prop));
3642    property_get("persist.denoise.process.plates", prop, "0");
3643    int processPlate = atoi(prop);
3644    switch(processPlate) {
3645    case 0:
3646        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3647    case 1:
3648        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3649    case 2:
3650        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3651    case 3:
3652        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3653    default:
3654        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3655    }
3656}
3657
3658/*===========================================================================
3659 * FUNCTION   : needRotationReprocess
3660 *
3661 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3662 *
3663 * PARAMETERS : none
3664 *
3665 * RETURN     : true: needed
3666 *              false: no need
3667 *==========================================================================*/
3668bool QCamera3HardwareInterface::needRotationReprocess()
3669{
3670
3671    if (!mJpegSettings->is_jpeg_format) {
3672        // RAW image, no need to reprocess
3673        return false;
3674    }
3675
3676    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3677        mJpegSettings->jpeg_orientation > 0) {
3678        // current rotation is not zero, and pp has the capability to process rotation
3679        ALOGD("%s: need do reprocess for rotation", __func__);
3680        return true;
3681    }
3682
3683    return false;
3684}
3685
3686/*===========================================================================
3687 * FUNCTION   : needReprocess
3688 *
3689 * DESCRIPTION: if reprocess is needed
3690 *
3691 * PARAMETERS : none
3692 *
3693 * RETURN     : true: needed
3694 *              false: no need
3695 *==========================================================================*/
3696bool QCamera3HardwareInterface::needReprocess()
3697{
3698    if (!mJpegSettings->is_jpeg_format) {
3699        // RAW image, no need to reprocess
3700        return false;
3701    }
3702
3703    if ((mJpegSettings->min_required_pp_mask > 0) ||
3704         isWNREnabled()) {
3705        // TODO: add for ZSL HDR later
3706        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3707        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3708        return true;
3709    }
3710    return needRotationReprocess();
3711}
3712
3713/*===========================================================================
3714 * FUNCTION   : addOnlineReprocChannel
3715 *
3716 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
3717 *              coming from input channel
3718 *
3719 * PARAMETERS :
3720 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3721 *
3722 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3723 *==========================================================================*/
3724QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3725                                                      QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3726{
3727    int32_t rc = NO_ERROR;
3728    QCamera3ReprocessChannel *pChannel = NULL;
3729    if (pInputChannel == NULL) {
3730        ALOGE("%s: input channel obj is NULL", __func__);
3731        return NULL;
3732    }
3733
3734    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3735            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
3736    if (NULL == pChannel) {
3737        ALOGE("%s: no mem for reprocess channel", __func__);
3738        return NULL;
3739    }
3740
3741    // Capture channel, only need snapshot and postview streams start together
3742    mm_camera_channel_attr_t attr;
3743    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
3744    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
3745    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
3746    rc = pChannel->initialize();
3747    if (rc != NO_ERROR) {
3748        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
3749        delete pChannel;
3750        return NULL;
3751    }
3752
3753    // pp feature config
3754    cam_pp_feature_config_t pp_config;
3755    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
3756    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
3757        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
3758        pp_config.sharpness = 10;
3759    }
3760
3761    if (isWNREnabled()) {
3762        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
3763        pp_config.denoise2d.denoise_enable = 1;
3764        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
3765    }
3766    if (needRotationReprocess()) {
3767        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
3768        int rotation = mJpegSettings->jpeg_orientation;
3769        if (rotation == 0) {
3770            pp_config.rotation = ROTATE_0;
3771        } else if (rotation == 90) {
3772            pp_config.rotation = ROTATE_90;
3773        } else if (rotation == 180) {
3774            pp_config.rotation = ROTATE_180;
3775        } else if (rotation == 270) {
3776            pp_config.rotation = ROTATE_270;
3777        }
3778    }
3779
3780   rc = pChannel->addReprocStreamsFromSource(pp_config,
3781                                             pInputChannel,
3782                                             mMetadataChannel);
3783
3784    if (rc != NO_ERROR) {
3785        delete pChannel;
3786        return NULL;
3787    }
3788    return pChannel;
3789}
3790
// Upper bound on unmatched frames a channel may hold queued, taken from the
// capability's minimum post-processing buffer count.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
3795
// Whether wavelet noise reduction is supported, per the static capability
// reported for this camera.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
3799
3800}; //end namespace qcamera
3801