QCamera3HWI.cpp revision 45465b8d86296ba7aeeb1ee4f9ba07b4bddf1652
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
/* Larger of a and b. NOTE: macro — both arguments are evaluated twice. */
#define MAX(a, b) ((a) > (b) ? (a) : (b))

/* Shorthand: fetch the buffer pointer at INDEX from a memory object. */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
/* Per-camera capability blobs, indexed by camera id (see ctor use of
 * gCamCapability[cameraId]). Presumably filled in before the HAL instance
 * is constructed — the ctor dereferences it unconditionally. */
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
/* Cached static metadata handed back to the framework, one per camera. */
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

/* Serializes session open/close; mCameraSessionActive enforces that only a
 * single camera session is active at a time (see openCamera()). */
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
/* Translation table: ANDROID_CONTROL_EFFECT_MODE_* <-> CAM_EFFECT_MODE_*. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
72
/* Translation table: ANDROID_CONTROL_AWB_MODE_* <-> CAM_WB_MODE_*. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
84
/* Translation table: ANDROID_CONTROL_SCENE_MODE_* <-> CAM_SCENE_MODE_*.
 * Note STEADYPHOTO deliberately maps to the HAL's ANTISHAKE mode. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
102
/* Translation table: ANDROID_CONTROL_AF_MODE_* <-> CAM_FOCUS_MODE_*.
 * AF_MODE_OFF is mapped to the HAL's FIXED focus mode. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
111
/* Translation table: ANDROID_CONTROL_AE_ANTIBANDING_MODE_* <->
 * CAM_ANTIBANDING_MODE_*. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
118
/* Maps AE modes to the flash behaviour they imply. Both AE_MODE_OFF and
 * AE_MODE_ON map to CAM_FLASH_MODE_OFF (plain AE implies no flash), and the
 * red-eye variant is treated as plain auto-flash. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
126
/* Translation table: ANDROID_FLASH_MODE_* <-> CAM_FLASH_MODE_*. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
132
/* Translation table: ANDROID_STATISTICS_FACE_DETECT_MODE_* <->
 * CAM_FACE_DETECT_MODE_*. SIMPLE mode is intentionally absent. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
137
/* Supported JPEG thumbnail dimensions as flattened (width, height) pairs;
 * the trailing (0, 0) pair advertises "no thumbnail", per the
 * ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES convention. */
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
140
/* camera3_device_ops dispatch table handed to the framework. Each entry is a
 * static trampoline; presumably each recovers the instance from
 * camera3_device_t::priv (set to `this` in the constructor) — the trampoline
 * bodies are outside this view. Uses GNU designated-initializer syntax. */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
152
153
154/*===========================================================================
155 * FUNCTION   : QCamera3HardwareInterface
156 *
157 * DESCRIPTION: constructor of QCamera3HardwareInterface
158 *
159 * PARAMETERS :
160 *   @cameraId  : camera ID
161 *
162 * RETURN     : none
163 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Fill in the camera3_device_t the framework talks to; priv carries
    // `this` so the static ops trampolines can find the instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE: gCamCapability[cameraId] is dereferenced unconditionally here;
    // it must have been populated before this constructor runs.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;  // -1 == no request id received yet
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start them all as NULL.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power-hint module is optional; failure to find it is non-fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
207
208/*===========================================================================
209 * FUNCTION   : ~QCamera3HardwareInterface
210 *
211 * DESCRIPTION: destructor of QCamera3HardwareInterface
212 *
213 * PARAMETERS : none
214 *
215 * RETURN     : none
216 *==========================================================================*/
217QCamera3HardwareInterface::~QCamera3HardwareInterface()
218{
219    ALOGV("%s: E", __func__);
220    /* We need to stop all streams before deleting any stream */
221        /*flush the metadata list*/
222    if (!mStoredMetadataList.empty()) {
223        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
224              m != mStoredMetadataList.end(); m++) {
225            mMetadataChannel->bufDone(m->meta_buf);
226            free(m->meta_buf);
227            m = mStoredMetadataList.erase(m);
228        }
229    }
230
231    // NOTE: 'camera3_stream_t *' objects are already freed at
232    //        this stage by the framework
233    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
234        it != mStreamInfo.end(); it++) {
235        QCamera3Channel *channel = (*it)->channel;
236        if (channel) {
237            channel->stop();
238        }
239    }
240
241    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
242        it != mStreamInfo.end(); it++) {
243        QCamera3Channel *channel = (*it)->channel;
244        if ((*it)->registered && (*it)->buffer_set.buffers) {
245             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
246        }
247        if (channel)
248            delete channel;
249        free (*it);
250    }
251
252    mPictureChannel = NULL;
253
254    if (mJpegSettings != NULL) {
255        free(mJpegSettings);
256        mJpegSettings = NULL;
257    }
258
259    /* Clean up all channels */
260    if (mCameraInitialized) {
261        if (mMetadataChannel) {
262            mMetadataChannel->stop();
263            delete mMetadataChannel;
264            mMetadataChannel = NULL;
265        }
266        deinitParameters();
267    }
268
269    if (mCameraOpened)
270        closeCamera();
271
272    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
273        if (mDefaultMetadata[i])
274            free_camera_metadata(mDefaultMetadata[i]);
275
276    pthread_cond_destroy(&mRequestCond);
277
278    pthread_mutex_destroy(&mMutex);
279    ALOGV("%s: X", __func__);
280}
281
282/*===========================================================================
283 * FUNCTION   : openCamera
284 *
285 * DESCRIPTION: open camera
286 *
287 * PARAMETERS :
288 *   @hw_device  : double ptr for camera device struct
289 *
290 * RETURN     : int32_t type of status
291 *              NO_ERROR  -- success
292 *              none-zero failure code
293 *==========================================================================*/
294int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
295{
296    int rc = 0;
297    pthread_mutex_lock(&mCameraSessionLock);
298    if (mCameraSessionActive) {
299        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
300        pthread_mutex_unlock(&mCameraSessionLock);
301        return -EUSERS;
302    }
303
304    if (mCameraOpened) {
305        *hw_device = NULL;
306        return PERMISSION_DENIED;
307    }
308
309    rc = openCamera();
310    if (rc == 0) {
311        *hw_device = &mCameraDevice.common;
312        mCameraSessionActive = 1;
313    } else
314        *hw_device = NULL;
315
316#ifdef HAS_MULTIMEDIA_HINTS
317    if (rc == 0) {
318        if (m_pPowerModule) {
319            if (m_pPowerModule->powerHint) {
320                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
321                        (void *)"state=1");
322            }
323        }
324    }
325#endif
326    pthread_mutex_unlock(&mCameraSessionLock);
327    return rc;
328}
329
330/*===========================================================================
331 * FUNCTION   : openCamera
332 *
333 * DESCRIPTION: open camera
334 *
335 * PARAMETERS : none
336 *
337 * RETURN     : int32_t type of status
338 *              NO_ERROR  -- success
339 *              none-zero failure code
340 *==========================================================================*/
341int QCamera3HardwareInterface::openCamera()
342{
343    if (mCameraHandle) {
344        ALOGE("Failure: Camera already opened");
345        return ALREADY_EXISTS;
346    }
347    mCameraHandle = camera_open(mCameraId);
348    if (!mCameraHandle) {
349        ALOGE("camera_open failed.");
350        return UNKNOWN_ERROR;
351    }
352
353    mCameraOpened = true;
354
355    return NO_ERROR;
356}
357
358/*===========================================================================
359 * FUNCTION   : closeCamera
360 *
361 * DESCRIPTION: close camera
362 *
363 * PARAMETERS : none
364 *
365 * RETURN     : int32_t type of status
366 *              NO_ERROR  -- success
367 *              none-zero failure code
368 *==========================================================================*/
369int QCamera3HardwareInterface::closeCamera()
370{
371    int rc = NO_ERROR;
372
373    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
374    mCameraHandle = NULL;
375    mCameraOpened = false;
376
377#ifdef HAS_MULTIMEDIA_HINTS
378    if (rc == NO_ERROR) {
379        if (m_pPowerModule) {
380            if (m_pPowerModule->powerHint) {
381                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
382                        (void *)"state=0");
383            }
384        }
385    }
386#endif
387
388    return rc;
389}
390
391/*===========================================================================
392 * FUNCTION   : initialize
393 *
394 * DESCRIPTION: Initialize frameworks callback functions
395 *
396 * PARAMETERS :
397 *   @callback_ops : callback function to frameworks
398 *
399 * RETURN     :
400 *
401 *==========================================================================*/
402int QCamera3HardwareInterface::initialize(
403        const struct camera3_callback_ops *callback_ops)
404{
405    int rc;
406
407    pthread_mutex_lock(&mMutex);
408
409    rc = initParameters();
410    if (rc < 0) {
411        ALOGE("%s: initParamters failed %d", __func__, rc);
412       goto err1;
413    }
414    mCallbackOps = callback_ops;
415
416    pthread_mutex_unlock(&mMutex);
417    mCameraInitialized = true;
418    return 0;
419
420err1:
421    pthread_mutex_unlock(&mMutex);
422    return rc;
423}
424
425/*===========================================================================
426 * FUNCTION   : configureStreams
427 *
428 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
429 *              and output streams.
430 *
431 * PARAMETERS :
432 *   @stream_list : streams to be configured
433 *
434 * RETURN     :
435 *
436 *==========================================================================*/
437int QCamera3HardwareInterface::configureStreams(
438        camera3_stream_configuration_t *streamList)
439{
440    int rc = 0;
441    mIsZslMode = false;
442
443    // Sanity check stream_list
444    if (streamList == NULL) {
445        ALOGE("%s: NULL stream configuration", __func__);
446        return BAD_VALUE;
447    }
448    if (streamList->streams == NULL) {
449        ALOGE("%s: NULL stream list", __func__);
450        return BAD_VALUE;
451    }
452
453    if (streamList->num_streams < 1) {
454        ALOGE("%s: Bad number of streams requested: %d", __func__,
455                streamList->num_streams);
456        return BAD_VALUE;
457    }
458
459    /* first invalidate all the steams in the mStreamList
460     * if they appear again, they will be validated */
461    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
462            it != mStreamInfo.end(); it++) {
463        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
464        channel->stop();
465        (*it)->status = INVALID;
466    }
467    if (mMetadataChannel) {
468        /* If content of mStreamInfo is not 0, there is metadata stream */
469        mMetadataChannel->stop();
470    }
471
472    pthread_mutex_lock(&mMutex);
473
474    camera3_stream_t *inputStream = NULL;
475    camera3_stream_t *jpegStream = NULL;
476    cam_stream_size_info_t stream_config_info;
477
478    for (size_t i = 0; i < streamList->num_streams; i++) {
479        camera3_stream_t *newStream = streamList->streams[i];
480        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
481                __func__, newStream->stream_type, newStream->format,
482                 newStream->width, newStream->height);
483        //if the stream is in the mStreamList validate it
484        bool stream_exists = false;
485        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
486                it != mStreamInfo.end(); it++) {
487            if ((*it)->stream == newStream) {
488                QCamera3Channel *channel =
489                    (QCamera3Channel*)(*it)->stream->priv;
490                stream_exists = true;
491                (*it)->status = RECONFIGURE;
492                /*delete the channel object associated with the stream because
493                  we need to reconfigure*/
494                delete channel;
495                (*it)->stream->priv = NULL;
496                (*it)->channel = NULL;
497            }
498        }
499        if (!stream_exists) {
500            //new stream
501            stream_info_t* stream_info;
502            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
503            stream_info->stream = newStream;
504            stream_info->status = VALID;
505            stream_info->registered = 0;
506            stream_info->channel = NULL;
507            mStreamInfo.push_back(stream_info);
508        }
509        if (newStream->stream_type == CAMERA3_STREAM_INPUT
510                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
511            if (inputStream != NULL) {
512                ALOGE("%s: Multiple input streams requested!", __func__);
513                pthread_mutex_unlock(&mMutex);
514                return BAD_VALUE;
515            }
516            inputStream = newStream;
517        }
518        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
519            jpegStream = newStream;
520        }
521    }
522    mInputStream = inputStream;
523
524    /*clean up invalid streams*/
525    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
526            it != mStreamInfo.end();) {
527        if(((*it)->status) == INVALID){
528            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
529            delete channel;
530            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
531            free(*it);
532            it = mStreamInfo.erase(it);
533        } else {
534            it++;
535        }
536    }
537    if (mMetadataChannel) {
538        delete mMetadataChannel;
539        mMetadataChannel = NULL;
540    }
541
542    //Create metadata channel and initialize it
543    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
544                    mCameraHandle->ops, captureResultCb,
545                    &gCamCapability[mCameraId]->padding_info, this);
546    if (mMetadataChannel == NULL) {
547        ALOGE("%s: failed to allocate metadata channel", __func__);
548        rc = -ENOMEM;
549        pthread_mutex_unlock(&mMutex);
550        return rc;
551    }
552    rc = mMetadataChannel->initialize();
553    if (rc < 0) {
554        ALOGE("%s: metadata channel initialization failed", __func__);
555        delete mMetadataChannel;
556        pthread_mutex_unlock(&mMutex);
557        return rc;
558    }
559
560    /* Allocate channel objects for the requested streams */
561    for (size_t i = 0; i < streamList->num_streams; i++) {
562        camera3_stream_t *newStream = streamList->streams[i];
563        uint32_t stream_usage = newStream->usage;
564        stream_config_info.stream_sizes[i].width = newStream->width;
565        stream_config_info.stream_sizes[i].height = newStream->height;
566        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
567            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
568            //for zsl stream the size is jpeg size
569            stream_config_info.stream_sizes[i].width = jpegStream->width;
570            stream_config_info.stream_sizes[i].height = jpegStream->height;
571            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
572        } else {
573           //for non zsl streams find out the format
574           switch (newStream->format) {
575           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
576              {
577                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
578                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
579                 } else {
580                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
581                 }
582              }
583              break;
584           case HAL_PIXEL_FORMAT_YCbCr_420_888:
585              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
586              break;
587           case HAL_PIXEL_FORMAT_BLOB:
588              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
589              break;
590           default:
591              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
592              break;
593           }
594        }
595        if (newStream->priv == NULL) {
596            //New stream, construct channel
597            switch (newStream->stream_type) {
598            case CAMERA3_STREAM_INPUT:
599                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
600                break;
601            case CAMERA3_STREAM_BIDIRECTIONAL:
602                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
603                    GRALLOC_USAGE_HW_CAMERA_WRITE;
604                break;
605            case CAMERA3_STREAM_OUTPUT:
606                /* For video encoding stream, set read/write rarely
607                 * flag so that they may be set to un-cached */
608                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
609                    newStream->usage =
610                         (GRALLOC_USAGE_SW_READ_RARELY |
611                         GRALLOC_USAGE_SW_WRITE_RARELY |
612                         GRALLOC_USAGE_HW_CAMERA_WRITE);
613                else
614                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
615                break;
616            default:
617                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
618                break;
619            }
620
621            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
622                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
623                QCamera3Channel *channel;
624                switch (newStream->format) {
625                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
626                case HAL_PIXEL_FORMAT_YCbCr_420_888:
627                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
628                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
629                        jpegStream) {
630                        uint32_t width = jpegStream->width;
631                        uint32_t height = jpegStream->height;
632                        mIsZslMode = true;
633                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
634                            mCameraHandle->ops, captureResultCb,
635                            &gCamCapability[mCameraId]->padding_info, this, newStream,
636                            width, height);
637                    } else
638                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
639                            mCameraHandle->ops, captureResultCb,
640                            &gCamCapability[mCameraId]->padding_info, this, newStream);
641                    if (channel == NULL) {
642                        ALOGE("%s: allocation of channel failed", __func__);
643                        pthread_mutex_unlock(&mMutex);
644                        return -ENOMEM;
645                    }
646
647                    newStream->priv = channel;
648                    break;
649                case HAL_PIXEL_FORMAT_BLOB:
650                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
651                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
652                            mCameraHandle->ops, captureResultCb,
653                            &gCamCapability[mCameraId]->padding_info, this, newStream);
654                    if (mPictureChannel == NULL) {
655                        ALOGE("%s: allocation of channel failed", __func__);
656                        pthread_mutex_unlock(&mMutex);
657                        return -ENOMEM;
658                    }
659                    newStream->priv = (QCamera3Channel*)mPictureChannel;
660                    break;
661
662                //TODO: Add support for app consumed format?
663                default:
664                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
665                    break;
666                }
667            }
668
669            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
670                    it != mStreamInfo.end(); it++) {
671                if ((*it)->stream == newStream) {
672                    (*it)->channel = (QCamera3Channel*) newStream->priv;
673                    break;
674                }
675            }
676        } else {
677            // Channel already exists for this stream
678            // Do nothing for now
679        }
680    }
681
682    int32_t hal_version = CAM_HAL_V3;
683    stream_config_info.num_streams = streamList->num_streams;
684
685    // settings/parameters don't carry over for new configureStreams
686    memset(mParameters, 0, sizeof(parm_buffer_t));
687
688    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
689    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
690                sizeof(hal_version), &hal_version);
691
692    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
693                sizeof(stream_config_info), &stream_config_info);
694
695    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
696
697    /*For the streams to be reconfigured we need to register the buffers
698      since the framework wont*/
699    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
700            it != mStreamInfo.end(); it++) {
701        if ((*it)->status == RECONFIGURE) {
702            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
703            /*only register buffers for streams that have already been
704              registered*/
705            if ((*it)->registered) {
706                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
707                        (*it)->buffer_set.buffers);
708                if (rc != NO_ERROR) {
709                    ALOGE("%s: Failed to register the buffers of old stream,\
710                            rc = %d", __func__, rc);
711                }
712                ALOGV("%s: channel %p has %d buffers",
713                        __func__, channel, (*it)->buffer_set.num_buffers);
714            }
715        }
716
717        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
718        if (index == NAME_NOT_FOUND) {
719            mPendingBuffersMap.add((*it)->stream, 0);
720        } else {
721            mPendingBuffersMap.editValueAt(index) = 0;
722        }
723    }
724
725    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
726    mPendingRequestsList.clear();
727
728    mPendingFrameDropList.clear();
729
730    /*flush the metadata list*/
731    if (!mStoredMetadataList.empty()) {
732        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
733              m != mStoredMetadataList.end(); m++) {
734            mMetadataChannel->bufDone(m->meta_buf);
735            free(m->meta_buf);
736            m = mStoredMetadataList.erase(m);
737        }
738    }
739
740    mFirstRequest = true;
741
742    //Get min frame duration for this streams configuration
743    deriveMinFrameDuration();
744
745    pthread_mutex_unlock(&mMutex);
746    return rc;
747}
748
749/*===========================================================================
750 * FUNCTION   : validateCaptureRequest
751 *
752 * DESCRIPTION: validate a capture request from camera service
753 *
754 * PARAMETERS :
755 *   @request : request from framework to process
756 *
757 * RETURN     :
758 *
759 *==========================================================================*/
760int QCamera3HardwareInterface::validateCaptureRequest(
761                    camera3_capture_request_t *request)
762{
763    ssize_t idx = 0;
764    const camera3_stream_buffer_t *b;
765    CameraMetadata meta;
766
767    /* Sanity check the request */
768    if (request == NULL) {
769        ALOGE("%s: NULL capture request", __func__);
770        return BAD_VALUE;
771    }
772
773    uint32_t frameNumber = request->frame_number;
774    if (request->input_buffer != NULL &&
775            request->input_buffer->stream != mInputStream) {
776        ALOGE("%s: Request %d: Input buffer not from input stream!",
777                __FUNCTION__, frameNumber);
778        return BAD_VALUE;
779    }
780    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
781        ALOGE("%s: Request %d: No output buffers provided!",
782                __FUNCTION__, frameNumber);
783        return BAD_VALUE;
784    }
785    if (request->input_buffer != NULL) {
786        b = request->input_buffer;
787        QCamera3Channel *channel =
788            static_cast<QCamera3Channel*>(b->stream->priv);
789        if (channel == NULL) {
790            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
791                    __func__, frameNumber, idx);
792            return BAD_VALUE;
793        }
794        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
795            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
796                    __func__, frameNumber, idx);
797            return BAD_VALUE;
798        }
799        if (b->release_fence != -1) {
800            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
801                    __func__, frameNumber, idx);
802            return BAD_VALUE;
803        }
804        if (b->buffer == NULL) {
805            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
806                    __func__, frameNumber, idx);
807            return BAD_VALUE;
808        }
809    }
810
811    // Validate all buffers
812    b = request->output_buffers;
813    do {
814        QCamera3Channel *channel =
815                static_cast<QCamera3Channel*>(b->stream->priv);
816        if (channel == NULL) {
817            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
818                    __func__, frameNumber, idx);
819            return BAD_VALUE;
820        }
821        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
822            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
823                    __func__, frameNumber, idx);
824            return BAD_VALUE;
825        }
826        if (b->release_fence != -1) {
827            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
828                    __func__, frameNumber, idx);
829            return BAD_VALUE;
830        }
831        if (b->buffer == NULL) {
832            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
833                    __func__, frameNumber, idx);
834            return BAD_VALUE;
835        }
836        idx++;
837        b = request->output_buffers + idx;
838    } while (idx < (ssize_t)request->num_output_buffers);
839
840    return NO_ERROR;
841}
842
843/*===========================================================================
844 * FUNCTION   : deriveMinFrameDuration
845 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
847 *              on currently configured streams.
848 *
849 * PARAMETERS : NONE
850 *
851 * RETURN     : NONE
852 *
853 *==========================================================================*/
854void QCamera3HardwareInterface::deriveMinFrameDuration()
855{
856    int32_t maxJpegDimension, maxProcessedDimension;
857
858    maxJpegDimension = 0;
859    maxProcessedDimension = 0;
860
861    // Figure out maximum jpeg, processed, and raw dimensions
862    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
863        it != mStreamInfo.end(); it++) {
864
865        // Input stream doesn't have valid stream_type
866        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
867            continue;
868
869        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
870        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
871            if (dimension > maxJpegDimension)
872                maxJpegDimension = dimension;
873        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
874            if (dimension > maxProcessedDimension)
875                maxProcessedDimension = dimension;
876        }
877    }
878
879    //Assume all jpeg dimensions are in processed dimensions.
880    if (maxJpegDimension > maxProcessedDimension)
881        maxProcessedDimension = maxJpegDimension;
882
883    //Find minimum durations for processed, jpeg, and raw
884    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
885    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
886        if (maxProcessedDimension ==
887            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
888            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
889            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
890            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
891            break;
892        }
893    }
894}
895
896/*===========================================================================
897 * FUNCTION   : getMinFrameDuration
898 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
905 *
906 *==========================================================================*/
907int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
908{
909    bool hasJpegStream = false;
910    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
911        const camera3_stream_t *stream = request->output_buffers[i].stream;
912        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
913            hasJpegStream = true;
914    }
915
916    if (!hasJpegStream)
917        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
918    else
919        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
920}
921
922/*===========================================================================
923 * FUNCTION   : registerStreamBuffers
924 *
925 * DESCRIPTION: Register buffers for a given stream with the HAL device.
926 *
927 * PARAMETERS :
 *   @buffer_set : stream buffer set to be registered with the HAL
929 *
930 * RETURN     :
931 *
932 *==========================================================================*/
933int QCamera3HardwareInterface::registerStreamBuffers(
934        const camera3_stream_buffer_set_t *buffer_set)
935{
936    int rc = 0;
937
938    pthread_mutex_lock(&mMutex);
939
940    if (buffer_set == NULL) {
941        ALOGE("%s: Invalid buffer_set parameter.", __func__);
942        pthread_mutex_unlock(&mMutex);
943        return -EINVAL;
944    }
945    if (buffer_set->stream == NULL) {
946        ALOGE("%s: Invalid stream parameter.", __func__);
947        pthread_mutex_unlock(&mMutex);
948        return -EINVAL;
949    }
950    if (buffer_set->num_buffers < 1) {
951        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
952        pthread_mutex_unlock(&mMutex);
953        return -EINVAL;
954    }
955    if (buffer_set->buffers == NULL) {
956        ALOGE("%s: Invalid buffers parameter.", __func__);
957        pthread_mutex_unlock(&mMutex);
958        return -EINVAL;
959    }
960
961    camera3_stream_t *stream = buffer_set->stream;
962    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
963
964    //set the buffer_set in the mStreamInfo array
965    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
966            it != mStreamInfo.end(); it++) {
967        if ((*it)->stream == stream) {
968            uint32_t numBuffers = buffer_set->num_buffers;
969            (*it)->buffer_set.stream = buffer_set->stream;
970            (*it)->buffer_set.num_buffers = numBuffers;
971            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
972            if ((*it)->buffer_set.buffers == NULL) {
973                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
974                pthread_mutex_unlock(&mMutex);
975                return -ENOMEM;
976            }
977            for (size_t j = 0; j < numBuffers; j++){
978                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
979            }
980            (*it)->registered = 1;
981        }
982    }
983    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
984    if (rc < 0) {
985        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
986        pthread_mutex_unlock(&mMutex);
987        return -ENODEV;
988    }
989
990    pthread_mutex_unlock(&mMutex);
991    return NO_ERROR;
992}
993
994/*===========================================================================
995 * FUNCTION   : processCaptureRequest
996 *
997 * DESCRIPTION: process a capture request from camera service
998 *
999 * PARAMETERS :
1000 *   @request : request from framework to process
1001 *
1002 * RETURN     :
1003 *
1004 *==========================================================================*/
1005int QCamera3HardwareInterface::processCaptureRequest(
1006                    camera3_capture_request_t *request)
1007{
1008    int rc = NO_ERROR;
1009    int32_t request_id;
1010    CameraMetadata meta;
1011    MetadataBufferInfo reproc_meta;
1012    int queueMetadata = 0;
1013
1014    pthread_mutex_lock(&mMutex);
1015
1016    rc = validateCaptureRequest(request);
1017    if (rc != NO_ERROR) {
1018        ALOGE("%s: incoming request is not valid", __func__);
1019        pthread_mutex_unlock(&mMutex);
1020        return rc;
1021    }
1022
1023    meta = request->settings;
1024
1025    // For first capture request, send capture intent, and
1026    // stream on all streams
1027    if (mFirstRequest) {
1028
1029        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1030            int32_t hal_version = CAM_HAL_V3;
1031            uint8_t captureIntent =
1032                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1033
1034            memset(mParameters, 0, sizeof(parm_buffer_t));
1035            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1036            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1037                sizeof(hal_version), &hal_version);
1038            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1039                sizeof(captureIntent), &captureIntent);
1040            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1041                mParameters);
1042        }
1043
1044        mMetadataChannel->start();
1045        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1046            it != mStreamInfo.end(); it++) {
1047            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1048            channel->start();
1049        }
1050    }
1051
1052    uint32_t frameNumber = request->frame_number;
1053    uint32_t streamTypeMask = 0;
1054
1055    if (meta.exists(ANDROID_REQUEST_ID)) {
1056        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1057        mCurrentRequestId = request_id;
1058        ALOGV("%s: Received request with id: %d",__func__, request_id);
1059    } else if (mFirstRequest || mCurrentRequestId == -1){
1060        ALOGE("%s: Unable to find request id field, \
1061                & no previous id available", __func__);
1062        return NAME_NOT_FOUND;
1063    } else {
1064        ALOGV("%s: Re-using old request id", __func__);
1065        request_id = mCurrentRequestId;
1066    }
1067
1068    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1069                                    __func__, __LINE__,
1070                                    request->num_output_buffers,
1071                                    request->input_buffer,
1072                                    frameNumber);
1073    // Acquire all request buffers first
1074    int blob_request = 0;
1075    for (size_t i = 0; i < request->num_output_buffers; i++) {
1076        const camera3_stream_buffer_t& output = request->output_buffers[i];
1077        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1078        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1079
1080        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1081        //Call function to store local copy of jpeg data for encode params.
1082            blob_request = 1;
1083            rc = getJpegSettings(request->settings);
1084            if (rc < 0) {
1085                ALOGE("%s: failed to get jpeg parameters", __func__);
1086                pthread_mutex_unlock(&mMutex);
1087                return rc;
1088            }
1089        }
1090
1091        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1092        if (rc != OK) {
1093            ALOGE("%s: fence wait failed %d", __func__, rc);
1094            pthread_mutex_unlock(&mMutex);
1095            return rc;
1096        }
1097        streamTypeMask |= channel->getStreamTypeMask();
1098    }
1099
1100    rc = setFrameParameters(request, streamTypeMask);
1101    if (rc < 0) {
1102        ALOGE("%s: fail to set frame parameters", __func__);
1103        pthread_mutex_unlock(&mMutex);
1104        return rc;
1105    }
1106
1107    /* Update pending request list and pending buffers map */
1108    PendingRequestInfo pendingRequest;
1109    pendingRequest.frame_number = frameNumber;
1110    pendingRequest.num_buffers = request->num_output_buffers;
1111    pendingRequest.request_id = request_id;
1112    pendingRequest.blob_request = blob_request;
1113    if (blob_request)
1114        pendingRequest.input_jpeg_settings = *mJpegSettings;
1115    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1116
1117    for (size_t i = 0; i < request->num_output_buffers; i++) {
1118        RequestedBufferInfo requestedBuf;
1119        requestedBuf.stream = request->output_buffers[i].stream;
1120        requestedBuf.buffer = NULL;
1121        pendingRequest.buffers.push_back(requestedBuf);
1122
1123        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1124    }
1125    mPendingRequestsList.push_back(pendingRequest);
1126
1127    // Notify metadata channel we receive a request
1128    mMetadataChannel->request(NULL, frameNumber);
1129
1130    // Call request on other streams
1131    for (size_t i = 0; i < request->num_output_buffers; i++) {
1132        const camera3_stream_buffer_t& output = request->output_buffers[i];
1133        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1134        mm_camera_buf_def_t *pInputBuffer = NULL;
1135
1136        if (channel == NULL) {
1137            ALOGE("%s: invalid channel pointer for stream", __func__);
1138            continue;
1139        }
1140
1141        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1142            QCamera3RegularChannel* inputChannel = NULL;
1143            if(request->input_buffer != NULL){
1144                //Try to get the internal format
1145                inputChannel = (QCamera3RegularChannel*)
1146                    request->input_buffer->stream->priv;
1147                if(inputChannel == NULL ){
1148                    ALOGE("%s: failed to get input channel handle", __func__);
1149                } else {
1150                    pInputBuffer =
1151                        inputChannel->getInternalFormatBuffer(
1152                                request->input_buffer->buffer);
1153                    ALOGD("%s: Input buffer dump",__func__);
1154                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1155                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1156                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1157                    ALOGD("Handle:%p", request->input_buffer->buffer);
1158                    //TODO: need to get corresponding metadata and send it to pproc
1159                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1160                         m != mStoredMetadataList.end(); m++) {
1161                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1162                            reproc_meta.meta_buf = m->meta_buf;
1163                            queueMetadata = 1;
1164                            break;
1165                        }
1166                    }
1167                }
1168            }
1169            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1170                            pInputBuffer,(QCamera3Channel*)inputChannel);
1171            if (queueMetadata) {
1172                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1173            }
1174        } else {
1175            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1176                __LINE__, output.buffer, frameNumber);
1177            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1178                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1179                     m != mStoredMetadataList.end(); m++) {
1180                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1181                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1182                            mMetadataChannel->bufDone(m->meta_buf);
1183                            free(m->meta_buf);
1184                            m = mStoredMetadataList.erase(m);
1185                            break;
1186                        }
1187                   }
1188                }
1189            }
1190            rc = channel->request(output.buffer, frameNumber);
1191        }
1192        if (rc < 0)
1193            ALOGE("%s: request failed", __func__);
1194    }
1195
1196    mFirstRequest = false;
1197    // Added a timed condition wait
1198    struct timespec ts;
1199    uint8_t isValidTimeout = 1;
1200    rc = clock_gettime(CLOCK_REALTIME, &ts);
1201    if (rc < 0) {
1202        isValidTimeout = 0;
1203        ALOGE("%s: Error reading the real time clock!!", __func__);
1204    }
1205    else {
1206        // Make timeout as 5 sec for request to be honored
1207        ts.tv_sec += 5;
1208    }
1209    //Block on conditional variable
1210    mPendingRequest = 1;
1211    while (mPendingRequest == 1) {
1212        if (!isValidTimeout) {
1213            ALOGV("%s: Blocking on conditional wait", __func__);
1214            pthread_cond_wait(&mRequestCond, &mMutex);
1215        }
1216        else {
1217            ALOGV("%s: Blocking on timed conditional wait", __func__);
1218            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1219            if (rc == ETIMEDOUT) {
1220                rc = -ENODEV;
1221                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1222                break;
1223            }
1224        }
1225        ALOGV("%s: Unblocked", __func__);
1226    }
1227
1228    pthread_mutex_unlock(&mMutex);
1229
1230    return rc;
1231}
1232
1233/*===========================================================================
1234 * FUNCTION   : getMetadataVendorTagOps
1235 *
1236 * DESCRIPTION:
1237 *
1238 * PARAMETERS :
1239 *
1240 *
1241 * RETURN     :
1242 *==========================================================================*/
1243void QCamera3HardwareInterface::getMetadataVendorTagOps(
1244                    vendor_tag_query_ops_t* /*ops*/)
1245{
1246    /* Enable locks when we eventually add Vendor Tags */
1247    /*
1248    pthread_mutex_lock(&mMutex);
1249
1250    pthread_mutex_unlock(&mMutex);
1251    */
1252    return;
1253}
1254
1255/*===========================================================================
1256 * FUNCTION   : dump
1257 *
1258 * DESCRIPTION:
1259 *
1260 * PARAMETERS :
1261 *
1262 *
1263 * RETURN     :
1264 *==========================================================================*/
1265void QCamera3HardwareInterface::dump(int /*fd*/)
1266{
1267    /*Enable lock when we implement this function*/
1268    /*
1269    pthread_mutex_lock(&mMutex);
1270
1271    pthread_mutex_unlock(&mMutex);
1272    */
1273    return;
1274}
1275
1276/*===========================================================================
1277 * FUNCTION   : flush
1278 *
1279 * DESCRIPTION:
1280 *
1281 * PARAMETERS :
1282 *
1283 *
1284 * RETURN     :
1285 *==========================================================================*/
1286int QCamera3HardwareInterface::flush()
1287{
1288    /*Enable lock when we implement this function*/
1289    /*
1290    pthread_mutex_lock(&mMutex);
1291
1292    pthread_mutex_unlock(&mMutex);
1293    */
1294    return 0;
1295}
1296
1297/*===========================================================================
1298 * FUNCTION   : captureResultCb
1299 *
1300 * DESCRIPTION: Callback handler for all capture result
1301 *              (streams, as well as metadata)
1302 *
1303 * PARAMETERS :
1304 *   @metadata : metadata information
1305 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1306 *               NULL if metadata.
1307 *
1308 * RETURN     : NONE
1309 *==========================================================================*/
1310void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1311                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1312{
1313    pthread_mutex_lock(&mMutex);
1314
1315    if (metadata_buf) {
1316        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1317        int32_t frame_number_valid = *(int32_t *)
1318            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1319        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1320            CAM_INTF_META_PENDING_REQUESTS, metadata);
1321        uint32_t frame_number = *(uint32_t *)
1322            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1323        const struct timeval *tv = (const struct timeval *)
1324            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1325        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1326            tv->tv_usec * NSEC_PER_USEC;
1327        cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
1328            POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
1329
1330        if (!frame_number_valid) {
1331            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1332            mMetadataChannel->bufDone(metadata_buf);
1333            free(metadata_buf);
1334            goto done_metadata;
1335        }
1336        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1337                frame_number, capture_time);
1338
1339        // Go through the pending requests info and send shutter/results to frameworks
1340        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1341                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1342            camera3_capture_result_t result;
1343            camera3_notify_msg_t notify_msg;
1344            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1345
1346            // Flush out all entries with less or equal frame numbers.
1347
1348            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1349            //Right now it's the same as metadata timestamp
1350
1351            //TODO: When there is metadata drop, how do we derive the timestamp of
1352            //dropped frames? For now, we fake the dropped timestamp by substracting
1353            //from the reported timestamp
1354            nsecs_t current_capture_time = capture_time -
1355                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1356
1357            // Send shutter notify to frameworks
1358            notify_msg.type = CAMERA3_MSG_SHUTTER;
1359            notify_msg.message.shutter.frame_number = i->frame_number;
1360            notify_msg.message.shutter.timestamp = current_capture_time;
1361            mCallbackOps->notify(mCallbackOps, &notify_msg);
1362            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1363                    i->frame_number, capture_time);
1364
1365            // Check whether any stream buffer corresponding to this is dropped or not
1366            // If dropped, then send the ERROR_BUFFER for the corresponding stream
1367            if (cam_frame_drop.frame_dropped) {
1368                camera3_notify_msg_t notify_msg;
1369                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1370                        j != i->buffers.end(); j++) {
1371                    QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1372                    uint32_t streamTypeMask = channel->getStreamTypeMask();
1373                    if (streamTypeMask & cam_frame_drop.stream_type_mask) {
1374                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
1375                        ALOGV("%s: Start of reporting error frame#=%d, streamMask=%d",
1376                               __func__, i->frame_number, streamTypeMask);
1377                        notify_msg.type = CAMERA3_MSG_ERROR;
1378                        notify_msg.message.error.frame_number = i->frame_number;
1379                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
1380                        notify_msg.message.error.error_stream = j->stream;
1381                        mCallbackOps->notify(mCallbackOps, &notify_msg);
1382                        ALOGV("%s: End of reporting error frame#=%d, streamMask=%d",
1383                               __func__, i->frame_number, streamTypeMask);
1384                        PendingFrameDropInfo PendingFrameDrop;
1385                        PendingFrameDrop.frame_number=i->frame_number;
1386                        PendingFrameDrop.stream_type_mask = cam_frame_drop.stream_type_mask;
1387                        // Add the Frame drop info to mPendingFrameDropList
1388                        mPendingFrameDropList.push_back(PendingFrameDrop);
1389                    }
1390                }
1391            }
1392
1393            // Send empty metadata with already filled buffers for dropped metadata
1394            // and send valid metadata with already filled buffers for current metadata
1395            if (i->frame_number < frame_number) {
1396                CameraMetadata dummyMetadata;
1397                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1398                        &current_capture_time, 1);
1399                dummyMetadata.update(ANDROID_REQUEST_ID,
1400                        &(i->request_id), 1);
1401                result.result = dummyMetadata.release();
1402            } else {
1403                result.result = translateCbMetadataToResultMetadata(metadata,
1404                        current_capture_time, i->request_id, i->blob_request,
1405                        &(i->input_jpeg_settings));
1406                if (mIsZslMode) {
1407                   int found_metadata = 0;
1408                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1409                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1410                        j != i->buffers.end(); j++) {
1411                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1412                         //check if corresp. zsl already exists in the stored metadata list
1413                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1414                               m != mStoredMetadataList.begin(); m++) {
1415                            if (m->frame_number == frame_number) {
1416                               m->meta_buf = metadata_buf;
1417                               found_metadata = 1;
1418                               break;
1419                            }
1420                         }
1421                         if (!found_metadata) {
1422                            MetadataBufferInfo store_meta_info;
1423                            store_meta_info.meta_buf = metadata_buf;
1424                            store_meta_info.frame_number = frame_number;
1425                            mStoredMetadataList.push_back(store_meta_info);
1426                            found_metadata = 1;
1427                         }
1428                      }
1429                   }
1430                   if (!found_metadata) {
1431                       if (!i->input_buffer_present && i->blob_request) {
1432                          //livesnapshot or fallback non-zsl snapshot case
1433                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1434                                j != i->buffers.end(); j++){
1435                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1436                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1437                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1438                                 break;
1439                              }
1440                         }
1441                       } else {
1442                            //return the metadata immediately
1443                            mMetadataChannel->bufDone(metadata_buf);
1444                            free(metadata_buf);
1445                       }
1446                   }
1447               } else if (!mIsZslMode && i->blob_request) {
1448                   //If it is a blob request then send the metadata to the picture channel
1449                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1450               } else {
1451                   // Return metadata buffer
1452                   mMetadataChannel->bufDone(metadata_buf);
1453                   free(metadata_buf);
1454               }
1455
1456            }
1457            if (!result.result) {
1458                ALOGE("%s: metadata is NULL", __func__);
1459            }
1460            result.frame_number = i->frame_number;
1461            result.num_output_buffers = 0;
1462            result.output_buffers = NULL;
1463            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1464                    j != i->buffers.end(); j++) {
1465                if (j->buffer) {
1466                    result.num_output_buffers++;
1467                }
1468            }
1469
1470            if (result.num_output_buffers > 0) {
1471                camera3_stream_buffer_t *result_buffers =
1472                    new camera3_stream_buffer_t[result.num_output_buffers];
1473                if (!result_buffers) {
1474                    ALOGE("%s: Fatal error: out of memory", __func__);
1475                }
1476                size_t result_buffers_idx = 0;
1477                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1478                     j != i->buffers.end(); j++) {
1479                     if (j->buffer) {
1480                         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1481                              m != mPendingFrameDropList.end(); m++) {
1482                              QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
1483                              uint32_t streamTypeMask = channel->getStreamTypeMask();
1484                              if((m->stream_type_mask & streamTypeMask) &&
1485                                  (m->frame_number==frame_number)) {
1486                                  j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1487                                  ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
1488                                        __func__, frame_number, streamTypeMask);
1489                                  m = mPendingFrameDropList.erase(m);
1490                                  break;
1491                              }
1492                         }
1493                         result_buffers[result_buffers_idx++] = *(j->buffer);
1494                         free(j->buffer);
1495                         j->buffer = NULL;
1496                         mPendingBuffersMap.editValueFor(j->stream)--;
1497                    }
1498                }
1499                result.output_buffers = result_buffers;
1500
1501                mCallbackOps->process_capture_result(mCallbackOps, &result);
1502                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1503                        __func__, result.frame_number, current_capture_time);
1504                free_camera_metadata((camera_metadata_t *)result.result);
1505                delete[] result_buffers;
1506            } else {
1507                mCallbackOps->process_capture_result(mCallbackOps, &result);
1508                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1509                        __func__, result.frame_number, current_capture_time);
1510                free_camera_metadata((camera_metadata_t *)result.result);
1511            }
1512            // erase the element from the list
1513            i = mPendingRequestsList.erase(i);
1514        }
1515
1516
1517done_metadata:
1518        bool max_buffers_dequeued = false;
1519        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1520            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1521            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1522            if (queued_buffers == stream->max_buffers) {
1523                max_buffers_dequeued = true;
1524                break;
1525            }
1526        }
1527        if (!max_buffers_dequeued && !pending_requests) {
1528            // Unblock process_capture_request
1529            mPendingRequest = 0;
1530            pthread_cond_signal(&mRequestCond);
1531        }
1532    } else {
1533        // If the frame number doesn't exist in the pending request list,
1534        // directly send the buffer to the frameworks, and update pending buffers map
1535        // Otherwise, book-keep the buffer.
1536        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1537        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1538            i++;
1539        }
1540        if (i == mPendingRequestsList.end()) {
1541            // Verify all pending requests frame_numbers are greater
1542            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1543                    j != mPendingRequestsList.end(); j++) {
1544                if (j->frame_number < frame_number) {
1545                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1546                            __func__, j->frame_number, frame_number);
1547                }
1548            }
1549            camera3_capture_result_t result;
1550            result.result = NULL;
1551            result.frame_number = frame_number;
1552            result.num_output_buffers = 1;
1553            for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1554                  m != mPendingFrameDropList.end(); m++) {
1555                QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
1556                uint32_t streamTypeMask = channel->getStreamTypeMask();
1557                if((m->stream_type_mask & streamTypeMask) &&
1558                    (m->frame_number==frame_number) ) {
1559                    buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1560                    ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
1561                            __func__, frame_number, streamTypeMask);
1562                    m = mPendingFrameDropList.erase(m);
1563                    break;
1564                }
1565            }
1566            result.output_buffers = buffer;
1567            ALOGV("%s: result frame_number = %d, buffer = %p",
1568                    __func__, frame_number, buffer);
1569            mPendingBuffersMap.editValueFor(buffer->stream)--;
1570            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1571                int found = 0;
1572                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1573                      k != mStoredMetadataList.end(); k++) {
1574                    if (k->frame_number == frame_number) {
1575                        k->zsl_buf_hdl = buffer->buffer;
1576                        found = 1;
1577                        break;
1578                    }
1579                }
1580                if (!found) {
1581                   MetadataBufferInfo meta_info;
1582                   meta_info.frame_number = frame_number;
1583                   meta_info.zsl_buf_hdl = buffer->buffer;
1584                   mStoredMetadataList.push_back(meta_info);
1585                }
1586            }
1587            mCallbackOps->process_capture_result(mCallbackOps, &result);
1588        } else {
1589            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1590                    j != i->buffers.end(); j++) {
1591                if (j->stream == buffer->stream) {
1592                    if (j->buffer != NULL) {
1593                        ALOGE("%s: Error: buffer is already set", __func__);
1594                    } else {
1595                        j->buffer = (camera3_stream_buffer_t *)malloc(
1596                                sizeof(camera3_stream_buffer_t));
1597                        *(j->buffer) = *buffer;
1598                        ALOGV("%s: cache buffer %p at result frame_number %d",
1599                                __func__, buffer, frame_number);
1600                    }
1601                }
1602            }
1603        }
1604    }
1605    pthread_mutex_unlock(&mMutex);
1606    return;
1607}
1608
1609/*===========================================================================
1610 * FUNCTION   : translateCbMetadataToResultMetadata
1611 *
 * DESCRIPTION: Translate metadata reported by the camera backend into a
 *              camera_metadata_t result in the format the framework expects,
 *              adding timestamp, request id and (for blob requests) JPEG info.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *   @timestamp : sensor timestamp to report (ANDROID_SENSOR_TIMESTAMP)
 *   @request_id : id of the originating capture request
 *   @BlobRequest : non-zero if this request includes a JPEG (blob) output
 *   @inputjpegsettings : JPEG settings of the request; only read when
 *                        BlobRequest is non-zero
1616 *
1617 * RETURN     : camera_metadata_t*
1618 *              metadata in a format specified by fwk
1619 *==========================================================================*/
1620camera_metadata_t*
1621QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1622                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1623                                 int32_t request_id, int32_t BlobRequest,
1624                                 jpeg_settings_t* inputjpegsettings)
1625{
1626    CameraMetadata camMetadata;
1627    camera_metadata_t* resultMetadata;
1628
1629    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1630    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1631
1632    // Update the JPEG related info
1633    if (BlobRequest) {
1634        camMetadata.update(ANDROID_JPEG_ORIENTATION, &(inputjpegsettings->jpeg_orientation), 1);
1635        camMetadata.update(ANDROID_JPEG_QUALITY, &(inputjpegsettings->jpeg_quality), 1);
1636
1637        int32_t thumbnailSizeTable[2];
1638        thumbnailSizeTable[0] = inputjpegsettings->thumbnail_size.width;
1639        thumbnailSizeTable[1] = inputjpegsettings->thumbnail_size.height;
1640        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSizeTable, 2);
1641        ALOGV("%s: Orien=%d, quality=%d wid=%d, height=%d", __func__, inputjpegsettings->jpeg_orientation,
1642               inputjpegsettings->jpeg_quality,thumbnailSizeTable[0], thumbnailSizeTable[1]);
1643
1644        if (inputjpegsettings->gps_coordinates[0]) {
1645            double gpsCoordinates[3];
1646            gpsCoordinates[0]=*(inputjpegsettings->gps_coordinates[0]);
1647            gpsCoordinates[1]=*(inputjpegsettings->gps_coordinates[1]);
1648            gpsCoordinates[2]=*(inputjpegsettings->gps_coordinates[2]);
1649            camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
1650            ALOGV("%s: gpsCoordinates[0]=%f, 1=%f 2=%f", __func__, gpsCoordinates[0],
1651                 gpsCoordinates[1],gpsCoordinates[2]);
1652        }
1653
1654        if (inputjpegsettings->gps_timestamp) {
1655            camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, inputjpegsettings->gps_timestamp, 1);
1656            ALOGV("%s: gps_timestamp=%lld", __func__, *(inputjpegsettings->gps_timestamp));
1657        }
1658
1659        String8 str(inputjpegsettings->gps_processing_method);
1660        if (strlen(mJpegSettings->gps_processing_method) > 0) {
1661            camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
1662        }
1663    }
1664    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1665    uint8_t next_entry;
1666    while (curr_entry != CAM_INTF_PARM_MAX) {
1667       ALOGV("%s: META_DEBUG: cur_entry is %d", __func__, curr_entry);
1668       switch (curr_entry) {
1669         case CAM_INTF_META_FACE_DETECTION:{
1670             cam_face_detection_data_t *faceDetectionInfo =
1671                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1672             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1673             int32_t faceIds[numFaces];
1674             uint8_t faceScores[numFaces];
1675             int32_t faceRectangles[numFaces * 4];
1676             int32_t faceLandmarks[numFaces * 6];
1677             int j = 0, k = 0;
1678             for (int i = 0; i < numFaces; i++) {
1679                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1680                 faceScores[i] = faceDetectionInfo->faces[i].score;
1681                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1682                         faceRectangles+j, -1);
1683                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1684                 j+= 4;
1685                 k+= 6;
1686             }
1687             if (numFaces > 0) {
1688                 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1689                 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1690                 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1691                     faceRectangles, numFaces*4);
1692                 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1693                     faceLandmarks, numFaces*6);
1694             }
1695            break;
1696            }
1697         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1698             uint8_t  *color_correct_mode =
1699                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1700             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1701             break;
1702          }
1703         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1704             int32_t  *ae_precapture_id =
1705                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1706             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1707             break;
1708          }
1709         case CAM_INTF_META_AEC_ROI: {
1710            cam_area_t  *hAeRegions =
1711                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1712             int32_t aeRegions[5];
1713             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1714             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1715             break;
1716          }
1717          case CAM_INTF_META_AEC_STATE:{
1718             uint8_t *ae_state =
1719                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1720             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1721             break;
1722          }
1723          case CAM_INTF_PARM_FOCUS_MODE:{
1724             uint8_t  *focusMode =
1725                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1726             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1727                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1728             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1729             break;
1730          }
1731          case CAM_INTF_META_AF_ROI:{
1732             /*af regions*/
1733             cam_area_t  *hAfRegions =
1734                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1735             int32_t afRegions[5];
1736             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1737             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1738             break;
1739          }
1740          case CAM_INTF_META_AF_STATE: {
1741             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1742             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1743             break;
1744          }
1745          case CAM_INTF_META_AF_TRIGGER_ID: {
1746             int32_t  *afTriggerId =
1747                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1748             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1749             break;
1750          }
1751          case CAM_INTF_PARM_WHITE_BALANCE: {
1752               uint8_t  *whiteBalance =
1753                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1754               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1755                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1756                   *whiteBalance);
1757               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1758               break;
1759          }
1760          case CAM_INTF_META_AWB_REGIONS: {
1761             /*awb regions*/
1762             cam_area_t  *hAwbRegions =
1763                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1764             int32_t awbRegions[5];
1765             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1766             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1767             break;
1768          }
1769          case CAM_INTF_META_AWB_STATE: {
1770             uint8_t  *whiteBalanceState =
1771                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1772             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1773             break;
1774          }
1775          case CAM_INTF_META_MODE: {
1776             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1777             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1778             break;
1779          }
1780          case CAM_INTF_META_EDGE_MODE: {
1781             uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1782             camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1783             break;
1784          }
1785          case CAM_INTF_META_FLASH_POWER: {
1786             uint8_t  *flashPower =
1787                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1788             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1789             break;
1790          }
1791          case CAM_INTF_META_FLASH_FIRING_TIME: {
1792             int64_t  *flashFiringTime =
1793                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1794             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1795             break;
1796          }
1797          case CAM_INTF_META_FLASH_STATE: {
1798             uint8_t  *flashState =
1799                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1800             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1801             break;
1802          }
1803          case CAM_INTF_META_FLASH_MODE:{
1804             uint8_t *flashMode = (uint8_t*)
1805                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
1806             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
1807             break;
1808          }
1809          case CAM_INTF_META_HOTPIXEL_MODE: {
1810              uint8_t  *hotPixelMode =
1811                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1812              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1813              break;
1814          }
1815          case CAM_INTF_META_LENS_APERTURE:{
1816             float  *lensAperture =
1817                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1818             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1819             break;
1820          }
1821          case CAM_INTF_META_LENS_FILTERDENSITY: {
1822             float  *filterDensity =
1823                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1824             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1825             break;
1826          }
1827          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1828             float  *focalLength =
1829                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1830             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1831             break;
1832          }
1833          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1834             float  *focusDistance =
1835                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1836             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1837             break;
1838          }
1839          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1840             float  *focusRange =
1841                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1842             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
1843          }
1844          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1845             uint8_t  *opticalStab =
1846                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1847             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1848          }
1849          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1850             uint8_t  *noiseRedMode =
1851                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1852             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1853             break;
1854          }
1855          case CAM_INTF_META_SCALER_CROP_REGION: {
1856             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1857             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1858             int32_t scalerCropRegion[4];
1859             scalerCropRegion[0] = hScalerCropRegion->left;
1860             scalerCropRegion[1] = hScalerCropRegion->top;
1861             scalerCropRegion[2] = hScalerCropRegion->width;
1862             scalerCropRegion[3] = hScalerCropRegion->height;
1863             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1864             break;
1865          }
1866          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1867             int64_t  *sensorExpTime =
1868                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1869             mMetadataResponse.exposure_time = *sensorExpTime;
1870             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1871             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1872             break;
1873          }
1874          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1875             int64_t  *sensorFameDuration =
1876                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1877             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1878             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1879             break;
1880          }
1881          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1882             int32_t  *sensorSensitivity =
1883                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1884             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1885             mMetadataResponse.iso_speed = *sensorSensitivity;
1886             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1887             break;
1888          }
1889          case CAM_INTF_META_SHADING_MODE: {
1890             uint8_t  *shadingMode =
1891                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1892             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1893             break;
1894          }
1895          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1896             uint8_t  *faceDetectMode =
1897                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1898             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1899                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1900                                                        *faceDetectMode);
1901             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1902             break;
1903          }
1904          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1905             uint8_t  *histogramMode =
1906                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1907             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1908             break;
1909          }
1910          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1911               uint8_t  *sharpnessMapMode =
1912                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1913               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1914                                  sharpnessMapMode, 1);
1915               break;
1916           }
1917          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1918               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1919               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1920               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1921                                  (int32_t*)sharpnessMap->sharpness,
1922                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1923               break;
1924          }
1925          case CAM_INTF_META_LENS_SHADING_MAP: {
1926               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1927               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1928               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1929               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1930               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1931                                  (float*)lensShadingMap->lens_shading,
1932                                  4*map_width*map_height);
1933               break;
1934          }
1935          case CAM_INTF_META_TONEMAP_CURVES:{
1936             //Populate CAM_INTF_META_TONEMAP_CURVES
1937             /* ch0 = G, ch 1 = B, ch 2 = R*/
1938             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1939             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1940             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1941                                (float*)tonemap->curves[0].tonemap_points,
1942                                tonemap->tonemap_points_cnt * 2);
1943
1944             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1945                                (float*)tonemap->curves[1].tonemap_points,
1946                                tonemap->tonemap_points_cnt * 2);
1947
1948             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1949                                (float*)tonemap->curves[2].tonemap_points,
1950                                tonemap->tonemap_points_cnt * 2);
1951             break;
1952          }
1953          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1954             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1955             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1956             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1957             break;
1958          }
1959          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1960              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1961              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1962              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1963                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1964              break;
1965          }
1966          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1967             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1968             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1969             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1970                       predColorCorrectionGains->gains, 4);
1971             break;
1972          }
1973          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1974             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1975                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1976             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1977                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1978             break;
1979
1980          }
1981          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
1982             uint8_t *blackLevelLock = (uint8_t*)
1983               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
1984             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
1985             break;
1986          }
1987          case CAM_INTF_META_SCENE_FLICKER:{
1988             uint8_t *sceneFlicker = (uint8_t*)
1989             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
1990             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
1991             break;
1992          }
1993          case CAM_INTF_PARM_LED_MODE:
1994             break;
1995          default:
1996             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
1997                   __func__, curr_entry);
1998             break;
1999       }
2000       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2001       curr_entry = next_entry;
2002    }
2003    resultMetadata = camMetadata.release();
2004    return resultMetadata;
2005}
2006
2007/*===========================================================================
2008 * FUNCTION   : convertToRegions
2009 *
2010 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2011 *
2012 * PARAMETERS :
2013 *   @rect   : cam_rect_t struct to convert
2014 *   @region : int32_t destination array
2015 *   @weight : if we are converting from cam_area_t, weight is valid
2016 *             else weight = -1
2017 *
2018 *==========================================================================*/
2019void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2020    region[0] = rect.left;
2021    region[1] = rect.top;
2022    region[2] = rect.left + rect.width;
2023    region[3] = rect.top + rect.height;
2024    if (weight > -1) {
2025        region[4] = weight;
2026    }
2027}
2028
2029/*===========================================================================
2030 * FUNCTION   : convertFromRegions
2031 *
2032 * DESCRIPTION: helper method to convert from array to cam_rect_t
2033 *
 * PARAMETERS :
 *   @roi      : destination cam_area_t (rect + weight) to fill
 *   @settings : capture request settings containing the region tag
 *   @tag      : metadata tag to read; its data is expected to be
 *               [x_min, y_min, x_max, y_max, weight]
 *
2039 *
2040 *==========================================================================*/
2041void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2042                                                   const camera_metadata_t *settings,
2043                                                   uint32_t tag){
2044    CameraMetadata frame_settings;
2045    frame_settings = settings;
2046    int32_t x_min = frame_settings.find(tag).data.i32[0];
2047    int32_t y_min = frame_settings.find(tag).data.i32[1];
2048    int32_t x_max = frame_settings.find(tag).data.i32[2];
2049    int32_t y_max = frame_settings.find(tag).data.i32[3];
2050    roi->weight = frame_settings.find(tag).data.i32[4];
2051    roi->rect.left = x_min;
2052    roi->rect.top = y_min;
2053    roi->rect.width = x_max - x_min;
2054    roi->rect.height = y_max - y_min;
2055}
2056
2057/*===========================================================================
2058 * FUNCTION   : resetIfNeededROI
2059 *
2060 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2061 *              crop region
2062 *
2063 * PARAMETERS :
2064 *   @roi       : cam_area_t struct to resize
2065 *   @scalerCropRegion : cam_crop_region_t region to compare against
2066 *
2067 *
2068 *==========================================================================*/
2069bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2070                                                 const cam_crop_region_t* scalerCropRegion)
2071{
2072    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2073    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2074    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2075    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2076    if ((roi_x_max < scalerCropRegion->left) ||
2077        (roi_y_max < scalerCropRegion->top)  ||
2078        (roi->rect.left > crop_x_max) ||
2079        (roi->rect.top > crop_y_max)){
2080        return false;
2081    }
2082    if (roi->rect.left < scalerCropRegion->left) {
2083        roi->rect.left = scalerCropRegion->left;
2084    }
2085    if (roi->rect.top < scalerCropRegion->top) {
2086        roi->rect.top = scalerCropRegion->top;
2087    }
2088    if (roi_x_max > crop_x_max) {
2089        roi_x_max = crop_x_max;
2090    }
2091    if (roi_y_max > crop_y_max) {
2092        roi_y_max = crop_y_max;
2093    }
2094    roi->rect.width = roi_x_max - roi->rect.left;
2095    roi->rect.height = roi_y_max - roi->rect.top;
2096    return true;
2097}
2098
2099/*===========================================================================
2100 * FUNCTION   : convertLandmarks
2101 *
2102 * DESCRIPTION: helper method to extract the landmarks from face detection info
2103 *
2104 * PARAMETERS :
 *   @face   : cam_face_detection_info_t struct to extract landmarks from
 *   @landmarks : int32_t destination array (6 entries: left eye, right eye,
 *                mouth center, each as x,y)
2107 *
2108 *
2109 *==========================================================================*/
2110void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2111{
2112    landmarks[0] = face.left_eye_center.x;
2113    landmarks[1] = face.left_eye_center.y;
2114    landmarks[2] = face.right_eye_center.x;
2115    landmarks[3] = face.right_eye_center.y;
2116    landmarks[4] = face.mouth_center.x;
2117    landmarks[5] = face.mouth_center.y;
2118}
2119
2120#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2121/*===========================================================================
2122 * FUNCTION   : initCapabilities
2123 *
2124 * DESCRIPTION: initialize camera capabilities in static data struct
2125 *
2126 * PARAMETERS :
2127 *   @cameraId  : camera Id
2128 *
2129 * RETURN     : int32_t type of status
2130 *              NO_ERROR  -- success
2131 *              none-zero failure code
2132 *==========================================================================*/
2133int QCamera3HardwareInterface::initCapabilities(int cameraId)
2134{
2135    int rc = 0;
2136    mm_camera_vtbl_t *cameraHandle = NULL;
2137    QCamera3HeapMemory *capabilityHeap = NULL;
2138
2139    cameraHandle = camera_open(cameraId);
2140    if (!cameraHandle) {
2141        ALOGE("%s: camera_open failed", __func__);
2142        rc = -1;
2143        goto open_failed;
2144    }
2145
2146    capabilityHeap = new QCamera3HeapMemory();
2147    if (capabilityHeap == NULL) {
2148        ALOGE("%s: creation of capabilityHeap failed", __func__);
2149        goto heap_creation_failed;
2150    }
2151    /* Allocate memory for capability buffer */
2152    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2153    if(rc != OK) {
2154        ALOGE("%s: No memory for cappability", __func__);
2155        goto allocate_failed;
2156    }
2157
2158    /* Map memory for capability buffer */
2159    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2160    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2161                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2162                                capabilityHeap->getFd(0),
2163                                sizeof(cam_capability_t));
2164    if(rc < 0) {
2165        ALOGE("%s: failed to map capability buffer", __func__);
2166        goto map_failed;
2167    }
2168
2169    /* Query Capability */
2170    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2171    if(rc < 0) {
2172        ALOGE("%s: failed to query capability",__func__);
2173        goto query_failed;
2174    }
2175    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2176    if (!gCamCapability[cameraId]) {
2177        ALOGE("%s: out of memory", __func__);
2178        goto query_failed;
2179    }
2180    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2181                                        sizeof(cam_capability_t));
2182    rc = 0;
2183
2184query_failed:
2185    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2186                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2187map_failed:
2188    capabilityHeap->deallocate();
2189allocate_failed:
2190    delete capabilityHeap;
2191heap_creation_failed:
2192    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2193    cameraHandle = NULL;
2194open_failed:
2195    return rc;
2196}
2197
2198/*===========================================================================
2199 * FUNCTION   : initParameters
2200 *
2201 * DESCRIPTION: initialize camera parameters
2202 *
2203 * PARAMETERS :
2204 *
2205 * RETURN     : int32_t type of status
2206 *              NO_ERROR  -- success
2207 *              none-zero failure code
2208 *==========================================================================*/
2209int QCamera3HardwareInterface::initParameters()
2210{
2211    int rc = 0;
2212
2213    //Allocate Set Param Buffer
2214    mParamHeap = new QCamera3HeapMemory();
2215    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2216    if(rc != OK) {
2217        rc = NO_MEMORY;
2218        ALOGE("Failed to allocate SETPARM Heap memory");
2219        delete mParamHeap;
2220        mParamHeap = NULL;
2221        return rc;
2222    }
2223
2224    //Map memory for parameters buffer
2225    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2226            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2227            mParamHeap->getFd(0),
2228            sizeof(parm_buffer_t));
2229    if(rc < 0) {
2230        ALOGE("%s:failed to map SETPARM buffer",__func__);
2231        rc = FAILED_TRANSACTION;
2232        mParamHeap->deallocate();
2233        delete mParamHeap;
2234        mParamHeap = NULL;
2235        return rc;
2236    }
2237
2238    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2239    return rc;
2240}
2241
2242/*===========================================================================
2243 * FUNCTION   : deinitParameters
2244 *
2245 * DESCRIPTION: de-initialize camera parameters
2246 *
2247 * PARAMETERS :
2248 *
2249 * RETURN     : NONE
2250 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the backend first (it was mapped in
    // initParameters via map_buf), so it no longer references the memory
    // we are about to free.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // Release the heap allocated by initParameters().
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage; clear the dangling ref.
    mParameters = NULL;
}
2262
2263/*===========================================================================
2264 * FUNCTION   : calcMaxJpegSize
2265 *
2266 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2267 *
2268 * PARAMETERS :
2269 *
2270 * RETURN     : max_jpeg_size
2271 *==========================================================================*/
2272int QCamera3HardwareInterface::calcMaxJpegSize()
2273{
2274    int32_t max_jpeg_size = 0;
2275    int temp_width, temp_height;
2276    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2277        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2278        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2279        if (temp_width * temp_height > max_jpeg_size ) {
2280            max_jpeg_size = temp_width * temp_height;
2281        }
2282    }
2283    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2284    return max_jpeg_size;
2285}
2286
2287/*===========================================================================
2288 * FUNCTION   : initStaticMetadata
2289 *
2290 * DESCRIPTION: initialize the static metadata
2291 *
2292 * PARAMETERS :
2293 *   @cameraId  : camera Id
2294 *
2295 * RETURN     : int32_t type of status
2296 *              0  -- success
2297 *              non-zero failure code
2298 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    // Builds the static metadata for cameraId from the backend capability
    // table (gCamCapability) and publishes it via gStaticMetadata[cameraId].
    int rc = 0;
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1); */

    /*hard coded for now but this should come from sensor*/
    float min_focus_distance;
    if(facingBack){
        min_focus_distance = 10;
    } else {
        min_focus_distance = 0;
    }
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &min_focus_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      &gCamCapability[cameraId]->focal_length,
                      1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                      gCamCapability[cameraId]->optical_stab_modes_count);

    staticInfo.update(ANDROID_LENS_POSITION,
                      gCamCapability[cameraId]->lens_position,
                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));

    // Map sizes are flattened to {width, height} int32 pairs.
    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
            geo_correction_map_size,
            sizeof(geo_correction_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
                       gCamCapability[cameraId]->geo_correction_map,
                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));

    /* Sensor static properties straight from the capability table. */
    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);

    camera_metadata_rational baseGainFactor = {
            gCamCapability[cameraId]->base_gain_factor.numerator,
            gCamCapability[cameraId]->base_gain_factor.denominator};
    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
                      &baseGainFactor, 1);

    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                               gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, 2);

    // Active array rect is {left, top, width, height}; origin fixed at (0,0).
    int32_t active_array_size[] = {0, 0,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    /* Statistics capabilities. */
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                                gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);


    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
                      &gCamCapability[cameraId]->raw_min_duration,
                       1);

    /* Stream formats/sizes advertised to the framework. */
    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
                                                HAL_PIXEL_FORMAT_BLOB};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /*HAL 1 and HAL 3 common*/
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    int32_t max3aRegions = 1;
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            &max3aRegions, 1);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                                        gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    // JPEG sizes are the same table as processed sizes.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    // NOTE(review): this duplicates calcMaxJpegSize()'s logic; that method
    // reads the instance's mCameraId while this path takes cameraId as a
    // parameter — consider sharing the computation.
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    /* Effect modes: translate backend enums to framework enums, dropping
     * any mode the framework has no name for. */
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    int32_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP,
                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    /* Scene modes: remember the backend index of each supported mode so the
     * overrides list below can be built from the same subset. */
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // Each scene mode contributes an {ae, awb, af} override triplet.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                 gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                    gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    // Flash firing levels are passed through as-is (no framework mapping).
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);


    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // NOTE(review): avail_ae_modes holds 5 entries, but supported_ae_modes_cnt
    // plus the 3 flash-dependent modes appended below could exceed that if the
    // backend ever reports more than 2 base AE modes — confirm the backend's
    // maximum before trusting this bound.
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // Max simultaneous output streams: {raw, processed, jpeg} counts.
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    // Transfer ownership of the assembled metadata to the global table.
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}
2658
2659/*===========================================================================
2660 * FUNCTION   : makeTable
2661 *
2662 * DESCRIPTION: make a table of sizes
2663 *
2664 * PARAMETERS :
2665 *
2666 *
2667 *==========================================================================*/
2668void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2669                                          int32_t* sizeTable)
2670{
2671    int j = 0;
2672    for (int i = 0; i < size; i++) {
2673        sizeTable[j] = dimTable[i].width;
2674        sizeTable[j+1] = dimTable[i].height;
2675        j+=2;
2676    }
2677}
2678
2679/*===========================================================================
2680 * FUNCTION   : makeFPSTable
2681 *
2682 * DESCRIPTION: make a table of fps ranges
2683 *
2684 * PARAMETERS :
2685 *
2686 *==========================================================================*/
2687void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2688                                          int32_t* fpsRangesTable)
2689{
2690    int j = 0;
2691    for (int i = 0; i < size; i++) {
2692        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2693        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2694        j+=2;
2695    }
2696}
2697
2698/*===========================================================================
2699 * FUNCTION   : makeOverridesList
2700 *
2701 * DESCRIPTION: make a list of scene mode overrides
2702 *
2703 * PARAMETERS :
2704 *
2705 *
2706 *==========================================================================*/
2707void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2708                                                  uint8_t size, uint8_t* overridesList,
2709                                                  uint8_t* supported_indexes,
2710                                                  int camera_id)
2711{
2712    /*daemon will give a list of overrides for all scene modes.
2713      However we should send the fwk only the overrides for the scene modes
2714      supported by the framework*/
2715    int j = 0, index = 0, supt = 0;
2716    uint8_t focus_override;
2717    for (int i = 0; i < size; i++) {
2718        supt = 0;
2719        index = supported_indexes[i];
2720        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2721        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2722                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2723                                                    overridesTable[index].awb_mode);
2724        focus_override = (uint8_t)overridesTable[index].af_mode;
2725        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2726           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2727              supt = 1;
2728              break;
2729           }
2730        }
2731        if (supt) {
2732           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2733                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2734                                              focus_override);
2735        } else {
2736           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2737        }
2738        j+=3;
2739    }
2740}
2741
2742/*===========================================================================
2743 * FUNCTION   : getPreviewHalPixelFormat
2744 *
2745 * DESCRIPTION: convert the format to type recognized by framework
2746 *
2747 * PARAMETERS : format : the format from backend
2748 *
2749 ** RETURN    : format recognized by framework
2750 *
2751 *==========================================================================*/
2752int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2753{
2754    int32_t halPixelFormat;
2755
2756    switch (format) {
2757    case CAM_FORMAT_YUV_420_NV12:
2758        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2759        break;
2760    case CAM_FORMAT_YUV_420_NV21:
2761        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2762        break;
2763    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2764        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2765        break;
2766    case CAM_FORMAT_YUV_420_YV12:
2767        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2768        break;
2769    case CAM_FORMAT_YUV_422_NV16:
2770    case CAM_FORMAT_YUV_422_NV61:
2771    default:
2772        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2773        break;
2774    }
2775    return halPixelFormat;
2776}
2777
2778/*===========================================================================
2779 * FUNCTION   : getSensorSensitivity
2780 *
2781 * DESCRIPTION: convert iso_mode to an integer value
2782 *
2783 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2784 *
2785 ** RETURN    : sensitivity supported by sensor
2786 *
2787 *==========================================================================*/
2788int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2789{
2790    int32_t sensitivity;
2791
2792    switch (iso_mode) {
2793    case CAM_ISO_MODE_100:
2794        sensitivity = 100;
2795        break;
2796    case CAM_ISO_MODE_200:
2797        sensitivity = 200;
2798        break;
2799    case CAM_ISO_MODE_400:
2800        sensitivity = 400;
2801        break;
2802    case CAM_ISO_MODE_800:
2803        sensitivity = 800;
2804        break;
2805    case CAM_ISO_MODE_1600:
2806        sensitivity = 1600;
2807        break;
2808    default:
2809        sensitivity = -1;
2810        break;
2811    }
2812    return sensitivity;
2813}
2814
2815
2816/*===========================================================================
2817 * FUNCTION   : AddSetParmEntryToBatch
2818 *
2819 * DESCRIPTION: add set parameter entry into batch
2820 *
2821 * PARAMETERS :
2822 *   @p_table     : ptr to parameter buffer
2823 *   @paramType   : parameter type
2824 *   @paramLength : length of parameter value
2825 *   @paramValue  : ptr to parameter value
2826 *
2827 * RETURN     : int32_t type of status
2828 *              NO_ERROR  -- success
2829 *              none-zero failure code
2830 *==========================================================================*/
2831int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2832                                                          cam_intf_parm_type_t paramType,
2833                                                          uint32_t paramLength,
2834                                                          void *paramValue)
2835{
2836    int position = paramType;
2837    int current, next;
2838
2839    /*************************************************************************
2840    *                 Code to take care of linking next flags                *
2841    *************************************************************************/
2842    current = GET_FIRST_PARAM_ID(p_table);
2843    if (position == current){
2844        //DO NOTHING
2845    } else if (position < current){
2846        SET_NEXT_PARAM_ID(position, p_table, current);
2847        SET_FIRST_PARAM_ID(p_table, position);
2848    } else {
2849        /* Search for the position in the linked list where we need to slot in*/
2850        while (position > GET_NEXT_PARAM_ID(current, p_table))
2851            current = GET_NEXT_PARAM_ID(current, p_table);
2852
2853        /*If node already exists no need to alter linking*/
2854        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2855            next = GET_NEXT_PARAM_ID(current, p_table);
2856            SET_NEXT_PARAM_ID(current, p_table, position);
2857            SET_NEXT_PARAM_ID(position, p_table, next);
2858        }
2859    }
2860
2861    /*************************************************************************
2862    *                   Copy contents into entry                             *
2863    *************************************************************************/
2864
2865    if (paramLength > sizeof(parm_type_t)) {
2866        ALOGE("%s:Size of input larger than max entry size",__func__);
2867        return BAD_VALUE;
2868    }
2869    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2870    return NO_ERROR;
2871}
2872
2873/*===========================================================================
2874 * FUNCTION   : lookupFwkName
2875 *
2876 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
2878 *
2879 * PARAMETERS  :
2880 *   @arr      : map between the two enums
2881 *   @len      : len of the map
2882 *   @hal_name : name of the hal_parm to map
2883 *
 * RETURN     : int8_t type of status
 *              fwk_name  -- success
 *              NAME_NOT_FOUND -- no matching framework enum found
2887 *==========================================================================*/
2888int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2889                                             int len, int hal_name)
2890{
2891
2892    for (int i = 0; i < len; i++) {
2893        if (arr[i].hal_name == hal_name)
2894            return arr[i].fwk_name;
2895    }
2896
2897    /* Not able to find matching framework type is not necessarily
2898     * an error case. This happens when mm-camera supports more attributes
2899     * than the frameworks do */
2900    ALOGD("%s: Cannot find matching framework type", __func__);
2901    return NAME_NOT_FOUND;
2902}
2903
2904/*===========================================================================
2905 * FUNCTION   : lookupHalName
2906 *
2907 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
2909 *
2910 * PARAMETERS  :
2911 *   @arr      : map between the two enums
2912 *   @len      : len of the map
2913 *   @fwk_name : name of the hal_parm to map
2914 *
 * RETURN     : int8_t type of status
 *              hal_name  -- success
 *              NAME_NOT_FOUND -- no matching HAL enum found
2918 *==========================================================================*/
2919int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2920                                             int len, int fwk_name)
2921{
2922    for (int i = 0; i < len; i++) {
2923       if (arr[i].fwk_name == fwk_name)
2924           return arr[i].hal_name;
2925    }
2926    ALOGE("%s: Cannot find matching hal type", __func__);
2927    return NAME_NOT_FOUND;
2928}
2929
2930/*===========================================================================
 * FUNCTION   : getCamInfo
2932 *
2933 * DESCRIPTION: query camera capabilities
2934 *
2935 * PARAMETERS :
2936 *   @cameraId  : camera Id
2937 *   @info      : camera info struct to be filled in with camera capabilities
2938 *
2939 * RETURN     : int32_t type of status
2940 *              NO_ERROR  -- success
 *              non-zero failure code
2942 *==========================================================================*/
2943int QCamera3HardwareInterface::getCamInfo(int cameraId,
2944                                    struct camera_info *info)
2945{
2946    int rc = 0;
2947
2948    if (NULL == gCamCapability[cameraId]) {
2949        rc = initCapabilities(cameraId);
2950        if (rc < 0) {
2951            //pthread_mutex_unlock(&g_camlock);
2952            return rc;
2953        }
2954    }
2955
2956    if (NULL == gStaticMetadata[cameraId]) {
2957        rc = initStaticMetadata(cameraId);
2958        if (rc < 0) {
2959            return rc;
2960        }
2961    }
2962
2963    switch(gCamCapability[cameraId]->position) {
2964    case CAM_POSITION_BACK:
2965        info->facing = CAMERA_FACING_BACK;
2966        break;
2967
2968    case CAM_POSITION_FRONT:
2969        info->facing = CAMERA_FACING_FRONT;
2970        break;
2971
2972    default:
2973        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2974        rc = -1;
2975        break;
2976    }
2977
2978
2979    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2980    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2981    info->static_camera_characteristics = gStaticMetadata[cameraId];
2982
2983    return rc;
2984}
2985
2986/*===========================================================================
 * FUNCTION   : translateCapabilityToMetadata
2988 *
2989 * DESCRIPTION: translate the metadata into camera_metadata_t
2990 *
2991 * PARAMETERS : type of the request
2992 *
2993 *
2994 * RETURN     : success: camera_metadata_t*
2995 *              failure: NULL
2996 *
2997 *==========================================================================*/
2998camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2999{
3000    pthread_mutex_lock(&mMutex);
3001
3002    if (mDefaultMetadata[type] != NULL) {
3003        pthread_mutex_unlock(&mMutex);
3004        return mDefaultMetadata[type];
3005    }
3006    //first time we are handling this request
3007    //fill up the metadata structure using the wrapper class
3008    CameraMetadata settings;
3009    //translate from cam_capability_t to camera_metadata_tag_t
3010    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
3011    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
3012    int32_t defaultRequestID = 0;
3013    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
3014
3015    /*control*/
3016
3017    uint8_t controlIntent = 0;
3018    switch (type) {
3019      case CAMERA3_TEMPLATE_PREVIEW:
3020        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
3021        break;
3022      case CAMERA3_TEMPLATE_STILL_CAPTURE:
3023        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
3024        break;
3025      case CAMERA3_TEMPLATE_VIDEO_RECORD:
3026        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
3027        break;
3028      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
3029        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
3030        break;
3031      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
3032        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
3033        break;
3034      default:
3035        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
3036        break;
3037    }
3038    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
3039
3040    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3041            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
3042
3043    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
3044    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3045
3046    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
3047    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
3048
3049    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
3050    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
3051
3052    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
3053    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
3054
3055    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
3056    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
3057
3058    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
3059    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
3060
3061    static uint8_t focusMode;
3062    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
3063        ALOGE("%s: Setting focus mode to auto", __func__);
3064        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
3065    } else {
3066        ALOGE("%s: Setting focus mode to off", __func__);
3067        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3068    }
3069    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
3070
3071    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
3072    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
3073
3074    /*flash*/
3075    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3076    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3077
3078    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3079    settings.update(ANDROID_FLASH_FIRING_POWER,
3080            &flashFiringLevel, 1);
3081
3082    /* lens */
3083    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3084    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3085
3086    if (gCamCapability[mCameraId]->filter_densities_count) {
3087        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3088        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3089                        gCamCapability[mCameraId]->filter_densities_count);
3090    }
3091
3092    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3093    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3094
3095    /* Exposure time(Update the Min Exposure Time)*/
3096    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
3097    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
3098
3099    /* frame duration */
3100    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
3101    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
3102
3103    /* sensitivity */
3104    static const int32_t default_sensitivity = 100;
3105    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
3106
3107    /*edge mode*/
3108    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
3109    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
3110
3111    /*noise reduction mode*/
3112    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3113    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3114
3115    /*color correction mode*/
3116    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3117    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3118
3119    /*transform matrix mode*/
3120    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3121    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3122
3123    int32_t edge_strength = gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3124    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3125
3126    int32_t scaler_crop_region[4];
3127    scaler_crop_region[0] = 0;
3128    scaler_crop_region[1] = 0;
3129    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
3130    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
3131    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
3132
3133    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
3134    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
3135
3136    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3137    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
3138
3139    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
3140                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
3141                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
3142    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
3143
3144    mDefaultMetadata[type] = settings.release();
3145
3146    pthread_mutex_unlock(&mMutex);
3147    return mDefaultMetadata[type];
3148}
3149
3150/*===========================================================================
3151 * FUNCTION   : setFrameParameters
3152 *
3153 * DESCRIPTION: set parameters per frame as requested in the metadata from
3154 *              framework
3155 *
3156 * PARAMETERS :
3157 *   @request   : request that needs to be serviced
3158 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3159 *
3160 * RETURN     : success: NO_ERROR
3161 *              failure:
3162 *==========================================================================*/
3163int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3164                    uint32_t streamTypeMask)
3165{
3166    /*translate from camera_metadata_t type to parm_type_t*/
3167    int rc = 0;
3168    if (request->settings == NULL && mFirstRequest) {
3169        /*settings cannot be null for the first request*/
3170        return BAD_VALUE;
3171    }
3172
3173    int32_t hal_version = CAM_HAL_V3;
3174
3175    memset(mParameters, 0, sizeof(parm_buffer_t));
3176    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3177    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3178                sizeof(hal_version), &hal_version);
3179    if (rc < 0) {
3180        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3181        return BAD_VALUE;
3182    }
3183
3184    /*we need to update the frame number in the parameters*/
3185    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3186                                sizeof(request->frame_number), &(request->frame_number));
3187    if (rc < 0) {
3188        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3189        return BAD_VALUE;
3190    }
3191
3192    /* Update stream id mask where buffers are requested */
3193    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3194                                sizeof(streamTypeMask), &streamTypeMask);
3195    if (rc < 0) {
3196        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3197        return BAD_VALUE;
3198    }
3199
3200    if(request->settings != NULL){
3201        rc = translateMetadataToParameters(request);
3202    }
3203    /*set the parameters to backend*/
3204    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3205    return rc;
3206}
3207
3208/*===========================================================================
3209 * FUNCTION   : translateMetadataToParameters
3210 *
3211 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3212 *
3213 *
3214 * PARAMETERS :
3215 *   @request  : request sent from framework
3216 *
3217 *
3218 * RETURN     : success: NO_ERROR
3219 *              failure:
3220 *==========================================================================*/
3221int QCamera3HardwareInterface::translateMetadataToParameters
3222                                  (const camera3_capture_request_t *request)
3223{
3224    int rc = 0;
3225    CameraMetadata frame_settings;
3226    frame_settings = request->settings;
3227
3228    /* Do not change the order of the following list unless you know what you are
3229     * doing.
3230     * The order is laid out in such a way that parameters in the front of the table
3231     * may be used to override the parameters later in the table. Examples are:
3232     * 1. META_MODE should precede AEC/AWB/AF MODE
3233     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3234     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3235     * 4. Any mode should precede it's corresponding settings
3236     */
3237    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3238        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3239        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3240                sizeof(metaMode), &metaMode);
3241        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3242           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3243           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3244                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3245                                             fwk_sceneMode);
3246           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3247                sizeof(sceneMode), &sceneMode);
3248        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3249           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3250           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3251                sizeof(sceneMode), &sceneMode);
3252        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3253           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3254           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3255                sizeof(sceneMode), &sceneMode);
3256        }
3257    }
3258
3259    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3260        uint8_t fwk_aeMode =
3261            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3262        uint8_t aeMode;
3263        int32_t redeye;
3264
3265        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3266            aeMode = CAM_AE_MODE_OFF;
3267        } else {
3268            aeMode = CAM_AE_MODE_ON;
3269        }
3270        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3271            redeye = 1;
3272        } else {
3273            redeye = 0;
3274        }
3275
3276        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3277                                          sizeof(AE_FLASH_MODE_MAP),
3278                                          fwk_aeMode);
3279        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3280                sizeof(aeMode), &aeMode);
3281        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3282                sizeof(flashMode), &flashMode);
3283        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3284                sizeof(redeye), &redeye);
3285    }
3286
3287    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3288        uint8_t fwk_whiteLevel =
3289            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3290        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3291                sizeof(WHITE_BALANCE_MODES_MAP),
3292                fwk_whiteLevel);
3293        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3294                sizeof(whiteLevel), &whiteLevel);
3295    }
3296
3297    float focalDistance = -1.0;
3298    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3299        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3300        rc = AddSetParmEntryToBatch(mParameters,
3301                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3302                sizeof(focalDistance), &focalDistance);
3303    }
3304
3305    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3306        uint8_t fwk_focusMode =
3307            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3308        uint8_t focusMode;
3309        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3310            focusMode = CAM_FOCUS_MODE_INFINITY;
3311        } else{
3312         focusMode = lookupHalName(FOCUS_MODES_MAP,
3313                                   sizeof(FOCUS_MODES_MAP),
3314                                   fwk_focusMode);
3315        }
3316        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3317                sizeof(focusMode), &focusMode);
3318    }
3319
3320    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3321        int32_t antibandingMode =
3322            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3323        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3324                sizeof(antibandingMode), &antibandingMode);
3325    }
3326
3327    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3328        int32_t expCompensation = frame_settings.find(
3329            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3330        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3331            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3332        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3333            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3334        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3335          sizeof(expCompensation), &expCompensation);
3336    }
3337
3338    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3339        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3340        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3341                sizeof(aeLock), &aeLock);
3342    }
3343    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3344        cam_fps_range_t fps_range;
3345        fps_range.min_fps =
3346            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3347        fps_range.max_fps =
3348            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3349        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3350                sizeof(fps_range), &fps_range);
3351    }
3352
3353    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3354        uint8_t awbLock =
3355            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3356        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3357                sizeof(awbLock), &awbLock);
3358    }
3359
3360    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3361        uint8_t fwk_effectMode =
3362            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3363        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3364                sizeof(EFFECT_MODES_MAP),
3365                fwk_effectMode);
3366        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3367                sizeof(effectMode), &effectMode);
3368    }
3369
3370    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3371        uint8_t colorCorrectMode =
3372            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3373        rc =
3374            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3375                    sizeof(colorCorrectMode), &colorCorrectMode);
3376    }
3377
3378    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3379        cam_color_correct_gains_t colorCorrectGains;
3380        for (int i = 0; i < 4; i++) {
3381            colorCorrectGains.gains[i] =
3382                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3383        }
3384        rc =
3385            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3386                    sizeof(colorCorrectGains), &colorCorrectGains);
3387    }
3388
3389    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3390        cam_color_correct_matrix_t colorCorrectTransform;
3391        cam_rational_type_t transform_elem;
3392        int num = 0;
3393        for (int i = 0; i < 3; i++) {
3394           for (int j = 0; j < 3; j++) {
3395              transform_elem.numerator =
3396                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3397              transform_elem.denominator =
3398                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3399              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3400              num++;
3401           }
3402        }
3403        rc =
3404            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3405                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3406    }
3407
3408    cam_trigger_t aecTrigger;
3409    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3410    aecTrigger.trigger_id = -1;
3411    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3412        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3413        aecTrigger.trigger =
3414            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3415        aecTrigger.trigger_id =
3416            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3417    }
3418    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3419                                sizeof(aecTrigger), &aecTrigger);
3420
3421    /*af_trigger must come with a trigger id*/
3422    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3423        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3424        cam_trigger_t af_trigger;
3425        af_trigger.trigger =
3426            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3427        af_trigger.trigger_id =
3428            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3429        rc = AddSetParmEntryToBatch(mParameters,
3430                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3431    }
3432
3433    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3434        int32_t demosaic =
3435            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3436        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3437                sizeof(demosaic), &demosaic);
3438    }
3439
3440    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3441        cam_edge_application_t edge_application;
3442        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3443        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3444            edge_application.sharpness = 0;
3445        } else {
3446            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3447                int32_t edgeStrength =
3448                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3449                edge_application.sharpness = edgeStrength;
3450            } else {
3451                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3452            }
3453        }
3454        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3455                sizeof(edge_application), &edge_application);
3456    }
3457
3458    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3459        int32_t respectFlashMode = 1;
3460        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3461            uint8_t fwk_aeMode =
3462                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3463            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3464                respectFlashMode = 0;
3465                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3466                    __func__);
3467            }
3468        }
3469        if (respectFlashMode) {
3470            uint8_t flashMode =
3471                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3472            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3473                                          sizeof(FLASH_MODES_MAP),
3474                                          flashMode);
3475            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3476            // To check: CAM_INTF_META_FLASH_MODE usage
3477            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3478                          sizeof(flashMode), &flashMode);
3479        }
3480    }
3481
3482    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3483        uint8_t flashPower =
3484            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3485        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3486                sizeof(flashPower), &flashPower);
3487    }
3488
3489    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3490        int64_t flashFiringTime =
3491            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3492        rc = AddSetParmEntryToBatch(mParameters,
3493                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3494    }
3495
3496    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3497        uint8_t geometricMode =
3498            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3499        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3500                sizeof(geometricMode), &geometricMode);
3501    }
3502
3503    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3504        uint8_t geometricStrength =
3505            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3506        rc = AddSetParmEntryToBatch(mParameters,
3507                CAM_INTF_META_GEOMETRIC_STRENGTH,
3508                sizeof(geometricStrength), &geometricStrength);
3509    }
3510
3511    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3512        uint8_t hotPixelMode =
3513            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3514        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3515                sizeof(hotPixelMode), &hotPixelMode);
3516    }
3517
3518    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3519        float lensAperture =
3520            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3521        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3522                sizeof(lensAperture), &lensAperture);
3523    }
3524
3525    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3526        float filterDensity =
3527            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3528        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3529                sizeof(filterDensity), &filterDensity);
3530    }
3531
3532    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3533        float focalLength =
3534            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3535        rc = AddSetParmEntryToBatch(mParameters,
3536                CAM_INTF_META_LENS_FOCAL_LENGTH,
3537                sizeof(focalLength), &focalLength);
3538    }
3539
3540    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3541        uint8_t optStabMode =
3542            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3543        rc = AddSetParmEntryToBatch(mParameters,
3544                CAM_INTF_META_LENS_OPT_STAB_MODE,
3545                sizeof(optStabMode), &optStabMode);
3546    }
3547
3548    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3549        uint8_t noiseRedMode =
3550            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3551        rc = AddSetParmEntryToBatch(mParameters,
3552                CAM_INTF_META_NOISE_REDUCTION_MODE,
3553                sizeof(noiseRedMode), &noiseRedMode);
3554    }
3555
3556    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3557        uint8_t noiseRedStrength =
3558            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3559        rc = AddSetParmEntryToBatch(mParameters,
3560                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3561                sizeof(noiseRedStrength), &noiseRedStrength);
3562    }
3563
3564    cam_crop_region_t scalerCropRegion;
3565    bool scalerCropSet = false;
3566    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3567        scalerCropRegion.left =
3568            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3569        scalerCropRegion.top =
3570            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3571        scalerCropRegion.width =
3572            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3573        scalerCropRegion.height =
3574            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3575        rc = AddSetParmEntryToBatch(mParameters,
3576                CAM_INTF_META_SCALER_CROP_REGION,
3577                sizeof(scalerCropRegion), &scalerCropRegion);
3578        scalerCropSet = true;
3579    }
3580
3581    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3582        int64_t sensorExpTime =
3583            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3584        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3585        rc = AddSetParmEntryToBatch(mParameters,
3586                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3587                sizeof(sensorExpTime), &sensorExpTime);
3588    }
3589
3590    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3591        int64_t sensorFrameDuration =
3592            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3593        int64_t minFrameDuration = getMinFrameDuration(request);
3594        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3595        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3596            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3597        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3598        rc = AddSetParmEntryToBatch(mParameters,
3599                CAM_INTF_META_SENSOR_FRAME_DURATION,
3600                sizeof(sensorFrameDuration), &sensorFrameDuration);
3601    }
3602
3603    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3604        int32_t sensorSensitivity =
3605            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3606        if (sensorSensitivity <
3607                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3608            sensorSensitivity =
3609                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3610        if (sensorSensitivity >
3611                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3612            sensorSensitivity =
3613                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3614        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3615        rc = AddSetParmEntryToBatch(mParameters,
3616                CAM_INTF_META_SENSOR_SENSITIVITY,
3617                sizeof(sensorSensitivity), &sensorSensitivity);
3618    }
3619
3620    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3621        int32_t shadingMode =
3622            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3623        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3624                sizeof(shadingMode), &shadingMode);
3625    }
3626
3627    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3628        uint8_t shadingStrength =
3629            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3630        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3631                sizeof(shadingStrength), &shadingStrength);
3632    }
3633
3634    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3635        uint8_t fwk_facedetectMode =
3636            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3637        uint8_t facedetectMode =
3638            lookupHalName(FACEDETECT_MODES_MAP,
3639                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3640        rc = AddSetParmEntryToBatch(mParameters,
3641                CAM_INTF_META_STATS_FACEDETECT_MODE,
3642                sizeof(facedetectMode), &facedetectMode);
3643    }
3644
3645    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3646        uint8_t histogramMode =
3647            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3648        rc = AddSetParmEntryToBatch(mParameters,
3649                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3650                sizeof(histogramMode), &histogramMode);
3651    }
3652
3653    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3654        uint8_t sharpnessMapMode =
3655            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3656        rc = AddSetParmEntryToBatch(mParameters,
3657                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3658                sizeof(sharpnessMapMode), &sharpnessMapMode);
3659    }
3660
3661    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3662        uint8_t tonemapMode =
3663            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3664        rc = AddSetParmEntryToBatch(mParameters,
3665                CAM_INTF_META_TONEMAP_MODE,
3666                sizeof(tonemapMode), &tonemapMode);
3667    }
3668    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3669    /*All tonemap channels will have the same number of points*/
3670    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3671        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3672        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3673        cam_rgb_tonemap_curves tonemapCurves;
3674        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3675
3676        /* ch0 = G*/
3677        int point = 0;
3678        cam_tonemap_curve_t tonemapCurveGreen;
3679        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3680            for (int j = 0; j < 2; j++) {
3681               tonemapCurveGreen.tonemap_points[i][j] =
3682                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3683               point++;
3684            }
3685        }
3686        tonemapCurves.curves[0] = tonemapCurveGreen;
3687
3688        /* ch 1 = B */
3689        point = 0;
3690        cam_tonemap_curve_t tonemapCurveBlue;
3691        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3692            for (int j = 0; j < 2; j++) {
3693               tonemapCurveBlue.tonemap_points[i][j] =
3694                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3695               point++;
3696            }
3697        }
3698        tonemapCurves.curves[1] = tonemapCurveBlue;
3699
3700        /* ch 2 = R */
3701        point = 0;
3702        cam_tonemap_curve_t tonemapCurveRed;
3703        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3704            for (int j = 0; j < 2; j++) {
3705               tonemapCurveRed.tonemap_points[i][j] =
3706                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3707               point++;
3708            }
3709        }
3710        tonemapCurves.curves[2] = tonemapCurveRed;
3711
3712        rc = AddSetParmEntryToBatch(mParameters,
3713                CAM_INTF_META_TONEMAP_CURVES,
3714                sizeof(tonemapCurves), &tonemapCurves);
3715    }
3716
3717    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3718        uint8_t captureIntent =
3719            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3720        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3721                sizeof(captureIntent), &captureIntent);
3722    }
3723
3724    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3725        uint8_t blackLevelLock =
3726            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3727        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3728                sizeof(blackLevelLock), &blackLevelLock);
3729    }
3730
3731    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3732        uint8_t lensShadingMapMode =
3733            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3734        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3735                sizeof(lensShadingMapMode), &lensShadingMapMode);
3736    }
3737
3738    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3739        cam_area_t roi;
3740        bool reset = true;
3741        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3742        if (scalerCropSet) {
3743            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3744        }
3745        if (reset) {
3746            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3747                    sizeof(roi), &roi);
3748        }
3749    }
3750
3751    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3752        cam_area_t roi;
3753        bool reset = true;
3754        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3755        if (scalerCropSet) {
3756            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3757        }
3758        if (reset) {
3759            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3760                    sizeof(roi), &roi);
3761        }
3762    }
3763
3764    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3765        cam_area_t roi;
3766        bool reset = true;
3767        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3768        if (scalerCropSet) {
3769            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3770        }
3771        if (reset) {
3772            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3773                    sizeof(roi), &roi);
3774        }
3775    }
3776    return rc;
3777}
3778
3779/*===========================================================================
3780 * FUNCTION   : getJpegSettings
3781 *
3782 * DESCRIPTION: save the jpeg settings in the HAL
3783 *
3784 *
3785 * PARAMETERS :
3786 *   @settings  : frame settings information from framework
3787 *
3788 *
3789 * RETURN     : success: NO_ERROR
3790 *              failure:
3791 *==========================================================================*/
3792int QCamera3HardwareInterface::getJpegSettings
3793                                  (const camera_metadata_t *settings)
3794{
3795    if (mJpegSettings) {
3796        if (mJpegSettings->gps_timestamp) {
3797            free(mJpegSettings->gps_timestamp);
3798            mJpegSettings->gps_timestamp = NULL;
3799        }
3800        if (mJpegSettings->gps_coordinates) {
3801            for (int i = 0; i < 3; i++) {
3802                free(mJpegSettings->gps_coordinates[i]);
3803                mJpegSettings->gps_coordinates[i] = NULL;
3804            }
3805        }
3806        free(mJpegSettings);
3807        mJpegSettings = NULL;
3808    }
3809    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3810    CameraMetadata jpeg_settings;
3811    jpeg_settings = settings;
3812
3813    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3814        mJpegSettings->jpeg_orientation =
3815            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3816    } else {
3817        mJpegSettings->jpeg_orientation = 0;
3818    }
3819    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3820        mJpegSettings->jpeg_quality =
3821            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3822    } else {
3823        mJpegSettings->jpeg_quality = 85;
3824    }
3825    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3826        mJpegSettings->thumbnail_size.width =
3827            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3828        mJpegSettings->thumbnail_size.height =
3829            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3830    } else {
3831        mJpegSettings->thumbnail_size.width = 0;
3832        mJpegSettings->thumbnail_size.height = 0;
3833    }
3834    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3835        for (int i = 0; i < 3; i++) {
3836            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3837            *(mJpegSettings->gps_coordinates[i]) =
3838                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3839        }
3840    } else{
3841       for (int i = 0; i < 3; i++) {
3842            mJpegSettings->gps_coordinates[i] = NULL;
3843        }
3844    }
3845
3846    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3847        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3848        *(mJpegSettings->gps_timestamp) =
3849            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3850    } else {
3851        mJpegSettings->gps_timestamp = NULL;
3852    }
3853
3854    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3855        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3856        for (int i = 0; i < len; i++) {
3857            mJpegSettings->gps_processing_method[i] =
3858                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3859        }
3860        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3861            mJpegSettings->gps_processing_method[len] = '\0';
3862        }
3863    } else {
3864        mJpegSettings->gps_processing_method[0] = '\0';
3865    }
3866
3867    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3868        mJpegSettings->sensor_sensitivity =
3869            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3870    } else {
3871        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3872    }
3873
3874    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3875
3876    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3877        mJpegSettings->lens_focal_length =
3878            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3879    }
3880    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3881        mJpegSettings->exposure_compensation =
3882            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3883    }
3884    mJpegSettings->sharpness = 10; //default value
3885    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3886        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3887        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3888            mJpegSettings->sharpness = 0;
3889        }
3890    }
3891    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3892    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3893    mJpegSettings->is_jpeg_format = true;
3894    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3895    return 0;
3896}
3897
3898/*===========================================================================
3899 * FUNCTION   : captureResultCb
3900 *
3901 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3902 *
3903 * PARAMETERS :
3904 *   @frame  : frame information from mm-camera-interface
3905 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3906 *   @userdata: userdata
3907 *
3908 * RETURN     : NONE
3909 *==========================================================================*/
3910void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3911                camera3_stream_buffer_t *buffer,
3912                uint32_t frame_number, void *userdata)
3913{
3914    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3915    if (hw == NULL) {
3916        ALOGE("%s: Invalid hw %p", __func__, hw);
3917        return;
3918    }
3919
3920    hw->captureResultCb(metadata, buffer, frame_number);
3921    return;
3922}
3923
3924
3925/*===========================================================================
3926 * FUNCTION   : initialize
3927 *
3928 * DESCRIPTION: Pass framework callback pointers to HAL
3929 *
3930 * PARAMETERS :
3931 *
3932 *
3933 * RETURN     : Success : 0
3934 *              Failure: -ENODEV
3935 *==========================================================================*/
3936
3937int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3938                                  const camera3_callback_ops_t *callback_ops)
3939{
3940    ALOGV("%s: E", __func__);
3941    QCamera3HardwareInterface *hw =
3942        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3943    if (!hw) {
3944        ALOGE("%s: NULL camera device", __func__);
3945        return -ENODEV;
3946    }
3947
3948    int rc = hw->initialize(callback_ops);
3949    ALOGV("%s: X", __func__);
3950    return rc;
3951}
3952
3953/*===========================================================================
3954 * FUNCTION   : configure_streams
3955 *
3956 * DESCRIPTION:
3957 *
3958 * PARAMETERS :
3959 *
3960 *
3961 * RETURN     : Success: 0
3962 *              Failure: -EINVAL (if stream configuration is invalid)
3963 *                       -ENODEV (fatal error)
3964 *==========================================================================*/
3965
3966int QCamera3HardwareInterface::configure_streams(
3967        const struct camera3_device *device,
3968        camera3_stream_configuration_t *stream_list)
3969{
3970    ALOGV("%s: E", __func__);
3971    QCamera3HardwareInterface *hw =
3972        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3973    if (!hw) {
3974        ALOGE("%s: NULL camera device", __func__);
3975        return -ENODEV;
3976    }
3977    int rc = hw->configureStreams(stream_list);
3978    ALOGV("%s: X", __func__);
3979    return rc;
3980}
3981
3982/*===========================================================================
3983 * FUNCTION   : register_stream_buffers
3984 *
3985 * DESCRIPTION: Register stream buffers with the device
3986 *
3987 * PARAMETERS :
3988 *
3989 * RETURN     :
3990 *==========================================================================*/
3991int QCamera3HardwareInterface::register_stream_buffers(
3992        const struct camera3_device *device,
3993        const camera3_stream_buffer_set_t *buffer_set)
3994{
3995    ALOGV("%s: E", __func__);
3996    QCamera3HardwareInterface *hw =
3997        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3998    if (!hw) {
3999        ALOGE("%s: NULL camera device", __func__);
4000        return -ENODEV;
4001    }
4002    int rc = hw->registerStreamBuffers(buffer_set);
4003    ALOGV("%s: X", __func__);
4004    return rc;
4005}
4006
4007/*===========================================================================
4008 * FUNCTION   : construct_default_request_settings
4009 *
4010 * DESCRIPTION: Configure a settings buffer to meet the required use case
4011 *
4012 * PARAMETERS :
4013 *
4014 *
4015 * RETURN     : Success: Return valid metadata
4016 *              Failure: Return NULL
4017 *==========================================================================*/
4018const camera_metadata_t* QCamera3HardwareInterface::
4019    construct_default_request_settings(const struct camera3_device *device,
4020                                        int type)
4021{
4022
4023    ALOGV("%s: E", __func__);
4024    camera_metadata_t* fwk_metadata = NULL;
4025    QCamera3HardwareInterface *hw =
4026        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4027    if (!hw) {
4028        ALOGE("%s: NULL camera device", __func__);
4029        return NULL;
4030    }
4031
4032    fwk_metadata = hw->translateCapabilityToMetadata(type);
4033
4034    ALOGV("%s: X", __func__);
4035    return fwk_metadata;
4036}
4037
4038/*===========================================================================
4039 * FUNCTION   : process_capture_request
4040 *
4041 * DESCRIPTION:
4042 *
4043 * PARAMETERS :
4044 *
4045 *
4046 * RETURN     :
4047 *==========================================================================*/
4048int QCamera3HardwareInterface::process_capture_request(
4049                    const struct camera3_device *device,
4050                    camera3_capture_request_t *request)
4051{
4052    ALOGV("%s: E", __func__);
4053    QCamera3HardwareInterface *hw =
4054        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4055    if (!hw) {
4056        ALOGE("%s: NULL camera device", __func__);
4057        return -EINVAL;
4058    }
4059
4060    int rc = hw->processCaptureRequest(request);
4061    ALOGV("%s: X", __func__);
4062    return rc;
4063}
4064
4065/*===========================================================================
4066 * FUNCTION   : get_metadata_vendor_tag_ops
4067 *
4068 * DESCRIPTION:
4069 *
4070 * PARAMETERS :
4071 *
4072 *
4073 * RETURN     :
4074 *==========================================================================*/
4075
4076void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
4077                const struct camera3_device *device,
4078                vendor_tag_query_ops_t* ops)
4079{
4080    ALOGV("%s: E", __func__);
4081    QCamera3HardwareInterface *hw =
4082        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4083    if (!hw) {
4084        ALOGE("%s: NULL camera device", __func__);
4085        return;
4086    }
4087
4088    hw->getMetadataVendorTagOps(ops);
4089    ALOGV("%s: X", __func__);
4090    return;
4091}
4092
4093/*===========================================================================
4094 * FUNCTION   : dump
4095 *
4096 * DESCRIPTION:
4097 *
4098 * PARAMETERS :
4099 *
4100 *
4101 * RETURN     :
4102 *==========================================================================*/
4103
4104void QCamera3HardwareInterface::dump(
4105                const struct camera3_device *device, int fd)
4106{
4107    ALOGV("%s: E", __func__);
4108    QCamera3HardwareInterface *hw =
4109        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4110    if (!hw) {
4111        ALOGE("%s: NULL camera device", __func__);
4112        return;
4113    }
4114
4115    hw->dump(fd);
4116    ALOGV("%s: X", __func__);
4117    return;
4118}
4119
4120/*===========================================================================
4121 * FUNCTION   : flush
4122 *
4123 * DESCRIPTION:
4124 *
4125 * PARAMETERS :
4126 *
4127 *
4128 * RETURN     :
4129 *==========================================================================*/
4130
4131int QCamera3HardwareInterface::flush(
4132                const struct camera3_device *device)
4133{
4134    int rc;
4135    ALOGV("%s: E", __func__);
4136    QCamera3HardwareInterface *hw =
4137        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4138    if (!hw) {
4139        ALOGE("%s: NULL camera device", __func__);
4140        return -EINVAL;
4141    }
4142
4143    rc = hw->flush();
4144    ALOGV("%s: X", __func__);
4145    return rc;
4146}
4147
4148/*===========================================================================
4149 * FUNCTION   : close_camera_device
4150 *
4151 * DESCRIPTION:
4152 *
4153 * PARAMETERS :
4154 *
4155 *
4156 * RETURN     :
4157 *==========================================================================*/
4158int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4159{
4160    ALOGV("%s: E", __func__);
4161    int ret = NO_ERROR;
4162    QCamera3HardwareInterface *hw =
4163        reinterpret_cast<QCamera3HardwareInterface *>(
4164            reinterpret_cast<camera3_device_t *>(device)->priv);
4165    if (!hw) {
4166        ALOGE("NULL camera device");
4167        return BAD_VALUE;
4168    }
4169    delete hw;
4170
4171    pthread_mutex_lock(&mCameraSessionLock);
4172    mCameraSessionActive = 0;
4173    pthread_mutex_unlock(&mCameraSessionLock);
4174    ALOGV("%s: X", __func__);
4175    return ret;
4176}
4177
4178/*===========================================================================
4179 * FUNCTION   : getWaveletDenoiseProcessPlate
4180 *
4181 * DESCRIPTION: query wavelet denoise process plate
4182 *
4183 * PARAMETERS : None
4184 *
 * RETURN     : WNR process plate value
4186 *==========================================================================*/
4187cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4188{
4189    char prop[PROPERTY_VALUE_MAX];
4190    memset(prop, 0, sizeof(prop));
4191    property_get("persist.denoise.process.plates", prop, "0");
4192    int processPlate = atoi(prop);
4193    switch(processPlate) {
4194    case 0:
4195        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4196    case 1:
4197        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4198    case 2:
4199        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4200    case 3:
4201        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4202    default:
4203        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4204    }
4205}
4206
4207/*===========================================================================
4208 * FUNCTION   : needRotationReprocess
4209 *
4210 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4211 *
4212 * PARAMETERS : none
4213 *
4214 * RETURN     : true: needed
4215 *              false: no need
4216 *==========================================================================*/
4217bool QCamera3HardwareInterface::needRotationReprocess()
4218{
4219
4220    if (!mJpegSettings->is_jpeg_format) {
4221        // RAW image, no need to reprocess
4222        return false;
4223    }
4224
4225    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4226        mJpegSettings->jpeg_orientation > 0) {
4227        // current rotation is not zero, and pp has the capability to process rotation
4228        ALOGD("%s: need do reprocess for rotation", __func__);
4229        return true;
4230    }
4231
4232    return false;
4233}
4234
4235/*===========================================================================
4236 * FUNCTION   : needReprocess
4237 *
 * DESCRIPTION: if reprocess is needed
4239 *
4240 * PARAMETERS : none
4241 *
4242 * RETURN     : true: needed
4243 *              false: no need
4244 *==========================================================================*/
4245bool QCamera3HardwareInterface::needReprocess()
4246{
4247    if (!mJpegSettings->is_jpeg_format) {
4248        // RAW image, no need to reprocess
4249        return false;
4250    }
4251
4252    if ((mJpegSettings->min_required_pp_mask > 0) ||
4253         isWNREnabled()) {
4254        // TODO: add for ZSL HDR later
4255        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4256        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4257        return true;
4258    }
4259    return needRotationReprocess();
4260}
4261
4262/*===========================================================================
4263 * FUNCTION   : addOnlineReprocChannel
4264 *
 * DESCRIPTION: add an online reprocess channel that will do reprocess on frames
4266 *              coming from input channel
4267 *
4268 * PARAMETERS :
4269 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4270 *
4271 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4272 *==========================================================================*/
4273QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4274              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4275{
4276    int32_t rc = NO_ERROR;
4277    QCamera3ReprocessChannel *pChannel = NULL;
4278    if (pInputChannel == NULL) {
4279        ALOGE("%s: input channel obj is NULL", __func__);
4280        return NULL;
4281    }
4282
4283    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4284            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4285    if (NULL == pChannel) {
4286        ALOGE("%s: no mem for reprocess channel", __func__);
4287        return NULL;
4288    }
4289
4290    // Capture channel, only need snapshot and postview streams start together
4291    mm_camera_channel_attr_t attr;
4292    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4293    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4294    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4295    rc = pChannel->initialize();
4296    if (rc != NO_ERROR) {
4297        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4298        delete pChannel;
4299        return NULL;
4300    }
4301
4302    // pp feature config
4303    cam_pp_feature_config_t pp_config;
4304    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4305    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4306        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4307        pp_config.sharpness = mJpegSettings->sharpness;
4308    }
4309
4310    if (isWNREnabled()) {
4311        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4312        pp_config.denoise2d.denoise_enable = 1;
4313        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4314    }
4315    if (needRotationReprocess()) {
4316        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4317        int rotation = mJpegSettings->jpeg_orientation;
4318        if (rotation == 0) {
4319            pp_config.rotation = ROTATE_0;
4320        } else if (rotation == 90) {
4321            pp_config.rotation = ROTATE_90;
4322        } else if (rotation == 180) {
4323            pp_config.rotation = ROTATE_180;
4324        } else if (rotation == 270) {
4325            pp_config.rotation = ROTATE_270;
4326        }
4327    }
4328
4329   rc = pChannel->addReprocStreamsFromSource(pp_config,
4330                                             pInputChannel,
4331                                             mMetadataChannel);
4332
4333    if (rc != NO_ERROR) {
4334        delete pChannel;
4335        return NULL;
4336    }
4337    return pChannel;
4338}
4339
// Number of unmatched frames tolerated in a channel's superbuf queue,
// taken from the capability's minimum post-processing buffer count.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4344
// Whether wavelet noise reduction is supported per this camera's
// static capability table.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4348
4349}; //end namespace qcamera
4350