QCamera3HWI.cpp revision 9bdb10cd8e81bd31408146118ef57bdea6e79891
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
// Generic max of two comparable values (classic double-evaluation caveat applies).
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand for fetching a buffer pointer out of a QCamera3Mem-style object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables; populated elsewhere (presumably at module
// init) before a device is opened -- entries are dereferenced unchecked.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Last parameter batch applied to the backend.
parm_buffer_t *prevSettings;
// Cached static metadata handed back to the framework, one per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes open/close across all HAL instances; guards mCameraSessionActive.
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
// Non-zero while any camera session is open (only one allowed at a time).
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Translation table: ANDROID_CONTROL_EFFECT_MODE_* <-> mm-camera CAM_EFFECT_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Translation table: ANDROID_CONTROL_AWB_MODE_* <-> CAM_WB_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Translation table: ANDROID_CONTROL_SCENE_MODE_* <-> CAM_SCENE_MODE_*.
// Note: FACE_PRIORITY deliberately maps to CAM_SCENE_MODE_OFF (face handling
// is done by the HAL, not a backend scene mode), and STEADYPHOTO maps to
// the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Translation table: ANDROID_CONTROL_AF_MODE_* <-> CAM_FOCUS_MODE_*.
// Note: AF_MODE_OFF maps to the backend's FIXED focus mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Translation table: ANDROID_CONTROL_AE_ANTIBANDING_MODE_* <-> CAM_ANTIBANDING_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Maps AE mode to the flash behavior it implies. Both AE_MODE_OFF and
// AE_MODE_ON intentionally map to flash OFF (AE_MODE_ON means "AE on,
// no flash"); both AUTO_FLASH variants map to CAM_FLASH_MODE_AUTO.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Translation table: ANDROID_FLASH_MODE_* <-> CAM_FLASH_MODE_*.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Translation table: face detect modes (only OFF and FULL are supported here;
// the SIMPLE mode has no entry).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Advertised JPEG thumbnail sizes as (width, height) pairs; the trailing
// 0,0 pair appears to act as the "no thumbnail" entry -- confirm against
// the static-metadata consumer.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
141
// camera3 device ops vtable handed to the framework via mCameraDevice.ops.
// Uses GNU designated-initializer label syntax; every entry forwards to a
// static trampoline that recovers the instance from camera3_device_t::priv.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
153
154
155/*===========================================================================
156 * FUNCTION   : QCamera3HardwareInterface
157 *
158 * DESCRIPTION: constructor of QCamera3HardwareInterface
159 *
160 * PARAMETERS :
161 *   @cameraId  : camera ID
162 *
163 * RETURN     : none
164 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Wire up the camera3_device_t shell the framework talks to; priv lets
    // the static ops trampolines recover this instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced unchecked --
    // assumes the capability table was populated before construction; confirm
    // against the module-level initialization path.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power module is optional; absence is logged but not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
208
209/*===========================================================================
210 * FUNCTION   : ~QCamera3HardwareInterface
211 *
212 * DESCRIPTION: destructor of QCamera3HardwareInterface
213 *
214 * PARAMETERS : none
215 *
216 * RETURN     : none
217 *==========================================================================*/
218QCamera3HardwareInterface::~QCamera3HardwareInterface()
219{
220    ALOGV("%s: E", __func__);
221    /* We need to stop all streams before deleting any stream */
222        /*flush the metadata list*/
223    if (!mStoredMetadataList.empty()) {
224        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
225              m != mStoredMetadataList.end(); m++) {
226            mMetadataChannel->bufDone(m->meta_buf);
227            free(m->meta_buf);
228            m = mStoredMetadataList.erase(m);
229        }
230    }
231
232    // NOTE: 'camera3_stream_t *' objects are already freed at
233    //        this stage by the framework
234    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
235        it != mStreamInfo.end(); it++) {
236        QCamera3Channel *channel = (*it)->channel;
237        if (channel) {
238            channel->stop();
239        }
240    }
241
242    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
243        it != mStreamInfo.end(); it++) {
244        QCamera3Channel *channel = (*it)->channel;
245        if ((*it)->registered && (*it)->buffer_set.buffers) {
246             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
247        }
248        if (channel)
249            delete channel;
250        free (*it);
251    }
252
253    mPictureChannel = NULL;
254
255    if (mJpegSettings != NULL) {
256        free(mJpegSettings);
257        mJpegSettings = NULL;
258    }
259
260    /* Clean up all channels */
261    if (mCameraInitialized) {
262        if (mMetadataChannel) {
263            mMetadataChannel->stop();
264            delete mMetadataChannel;
265            mMetadataChannel = NULL;
266        }
267        deinitParameters();
268    }
269
270    if (mCameraOpened)
271        closeCamera();
272
273    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
274        if (mDefaultMetadata[i])
275            free_camera_metadata(mDefaultMetadata[i]);
276
277    pthread_cond_destroy(&mRequestCond);
278
279    pthread_mutex_destroy(&mMutex);
280    ALOGV("%s: X", __func__);
281}
282
283/*===========================================================================
284 * FUNCTION   : openCamera
285 *
286 * DESCRIPTION: open camera
287 *
288 * PARAMETERS :
289 *   @hw_device  : double ptr for camera device struct
290 *
291 * RETURN     : int32_t type of status
292 *              NO_ERROR  -- success
293 *              none-zero failure code
294 *==========================================================================*/
295int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
296{
297    int rc = 0;
298    pthread_mutex_lock(&mCameraSessionLock);
299    if (mCameraSessionActive) {
300        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
301        pthread_mutex_unlock(&mCameraSessionLock);
302        return -EUSERS;
303    }
304
305    if (mCameraOpened) {
306        *hw_device = NULL;
307        return PERMISSION_DENIED;
308    }
309
310    rc = openCamera();
311    if (rc == 0) {
312        *hw_device = &mCameraDevice.common;
313        mCameraSessionActive = 1;
314    } else
315        *hw_device = NULL;
316
317#ifdef HAS_MULTIMEDIA_HINTS
318    if (rc == 0) {
319        if (m_pPowerModule) {
320            if (m_pPowerModule->powerHint) {
321                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
322                        (void *)"state=1");
323            }
324        }
325    }
326#endif
327    pthread_mutex_unlock(&mCameraSessionLock);
328    return rc;
329}
330
331/*===========================================================================
332 * FUNCTION   : openCamera
333 *
334 * DESCRIPTION: open camera
335 *
336 * PARAMETERS : none
337 *
338 * RETURN     : int32_t type of status
339 *              NO_ERROR  -- success
340 *              none-zero failure code
341 *==========================================================================*/
342int QCamera3HardwareInterface::openCamera()
343{
344    if (mCameraHandle) {
345        ALOGE("Failure: Camera already opened");
346        return ALREADY_EXISTS;
347    }
348    mCameraHandle = camera_open(mCameraId);
349    if (!mCameraHandle) {
350        ALOGE("camera_open failed.");
351        return UNKNOWN_ERROR;
352    }
353
354    mCameraOpened = true;
355
356    return NO_ERROR;
357}
358
359/*===========================================================================
360 * FUNCTION   : closeCamera
361 *
362 * DESCRIPTION: close camera
363 *
364 * PARAMETERS : none
365 *
366 * RETURN     : int32_t type of status
367 *              NO_ERROR  -- success
368 *              none-zero failure code
369 *==========================================================================*/
370int QCamera3HardwareInterface::closeCamera()
371{
372    int rc = NO_ERROR;
373
374    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
375    mCameraHandle = NULL;
376    mCameraOpened = false;
377
378#ifdef HAS_MULTIMEDIA_HINTS
379    if (rc == NO_ERROR) {
380        if (m_pPowerModule) {
381            if (m_pPowerModule->powerHint) {
382                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
383                        (void *)"state=0");
384            }
385        }
386    }
387#endif
388
389    return rc;
390}
391
392/*===========================================================================
393 * FUNCTION   : initialize
394 *
395 * DESCRIPTION: Initialize frameworks callback functions
396 *
397 * PARAMETERS :
398 *   @callback_ops : callback function to frameworks
399 *
400 * RETURN     :
401 *
402 *==========================================================================*/
403int QCamera3HardwareInterface::initialize(
404        const struct camera3_callback_ops *callback_ops)
405{
406    int rc;
407
408    pthread_mutex_lock(&mMutex);
409
410    rc = initParameters();
411    if (rc < 0) {
412        ALOGE("%s: initParamters failed %d", __func__, rc);
413       goto err1;
414    }
415    mCallbackOps = callback_ops;
416
417    pthread_mutex_unlock(&mMutex);
418    mCameraInitialized = true;
419    return 0;
420
421err1:
422    pthread_mutex_unlock(&mMutex);
423    return rc;
424}
425
426/*===========================================================================
427 * FUNCTION   : configureStreams
428 *
429 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
430 *              and output streams.
431 *
432 * PARAMETERS :
433 *   @stream_list : streams to be configured
434 *
435 * RETURN     :
436 *
437 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    mIsZslMode = false;

    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        return BAD_VALUE;
    }
    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        return BAD_VALUE;
    }

    /* Phase 1: stop everything. First invalidate all the streams in
     * mStreamInfo; if they appear again in the new list they will be
     * re-validated. Note this runs BEFORE taking mMutex. */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        mMetadataChannel->stop();
    }

    pthread_mutex_lock(&mMutex);

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    cam_stream_size_info_t stream_config_info;

    /* Phase 2: reconcile the incoming list against mStreamInfo. Streams
     * seen before are marked RECONFIGURE (their old channel is deleted so
     * it can be rebuilt); brand-new streams get a fresh stream_info_t. */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
                (*it)->channel = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            stream_info_t* stream_info;
            // NOTE(review): malloc return value is not checked before use --
            // confirm the intended OOM policy here.
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            stream_info->channel = NULL;
            mStreamInfo.push_back(stream_info);
        }
        // Track the (single allowed) input stream and any BLOB/JPEG stream;
        // the JPEG dimensions are reused below to size the ZSL stream.
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /* Phase 3: clean up streams that did not reappear in the new list. */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    // NOTE(review): plain operator new throws rather than returning NULL,
    // so this check is dead unless a custom/nothrow new is in play -- confirm.
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* Phase 4: allocate channel objects for the requested streams and fill
     * stream_config_info (type + dimensions per stream) for the backend. */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        stream_config_info.stream_sizes[i].width = newStream->width;
        stream_config_info.stream_sizes[i].height = newStream->height;
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
            //for zsl stream the size is jpeg size
            stream_config_info.stream_sizes[i].width = jpegStream->width;
            stream_config_info.stream_sizes[i].height = jpegStream->height;
            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
        } else {
           //for non zsl streams find out the format
           switch (newStream->format) {
           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
              {
                 // Gralloc usage distinguishes encoder-bound (video) from
                 // display-bound (preview) opaque streams.
                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
                 } else {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
                 }
              }
              break;
           case HAL_PIXEL_FORMAT_YCbCr_420_888:
              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
              break;
           case HAL_PIXEL_FORMAT_BLOB:
              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
              break;
           default:
              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
              break;
           }
        }
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // Advertise gralloc usage flags back to the framework per
            // stream direction.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage =
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else
                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    // Bidirectional + JPEG present => ZSL: size the regular
                    // channel to the JPEG dimensions.
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    // NOTE(review): dead NULL check unless nothrow new is used.
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }

            // Record the freshly-built channel on its stream_info entry.
            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                    it != mStreamInfo.end(); it++) {
                if ((*it)->stream == newStream) {
                    (*it)->channel = (QCamera3Channel*) newStream->priv;
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }

    /* Phase 5: push HAL version and the per-stream size/type table to the
     * backend in one parameter batch. */
    int32_t hal_version = CAM_HAL_V3;
    stream_config_info.num_streams = streamList->num_streams;

    // settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));

    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);

    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
                sizeof(stream_config_info), &stream_config_info);

    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);

    /*For the streams to be reconfigured we need to register the buffers
      since the framework wont*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        // Reset the pending-buffer count for every stream in the new config.
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();

    mPendingFrameDropList.clear();

    /*flush the metadata list*/
    // NOTE(review): erase() returns the next iterator AND the for-header
    // increments m again, so alternate entries appear to be skipped (and
    // their meta_buf leaked) -- same pattern as the destructor; confirm
    // and fix both together.
    if (!mStoredMetadataList.empty()) {
        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
              m != mStoredMetadataList.end(); m++) {
            mMetadataChannel->bufDone(m->meta_buf);
            free(m->meta_buf);
            m = mStoredMetadataList.erase(m);
        }
    }

    mFirstRequest = true;

    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}
750
751/*===========================================================================
752 * FUNCTION   : validateCaptureRequest
753 *
754 * DESCRIPTION: validate a capture request from camera service
755 *
756 * PARAMETERS :
757 *   @request : request from framework to process
758 *
759 * RETURN     :
760 *
761 *==========================================================================*/
762int QCamera3HardwareInterface::validateCaptureRequest(
763                    camera3_capture_request_t *request)
764{
765    ssize_t idx = 0;
766    const camera3_stream_buffer_t *b;
767    CameraMetadata meta;
768
769    /* Sanity check the request */
770    if (request == NULL) {
771        ALOGE("%s: NULL capture request", __func__);
772        return BAD_VALUE;
773    }
774
775    uint32_t frameNumber = request->frame_number;
776    if (request->input_buffer != NULL &&
777            request->input_buffer->stream != mInputStream) {
778        ALOGE("%s: Request %d: Input buffer not from input stream!",
779                __FUNCTION__, frameNumber);
780        return BAD_VALUE;
781    }
782    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
783        ALOGE("%s: Request %d: No output buffers provided!",
784                __FUNCTION__, frameNumber);
785        return BAD_VALUE;
786    }
787    if (request->input_buffer != NULL) {
788        b = request->input_buffer;
789        QCamera3Channel *channel =
790            static_cast<QCamera3Channel*>(b->stream->priv);
791        if (channel == NULL) {
792            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
793                    __func__, frameNumber, idx);
794            return BAD_VALUE;
795        }
796        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
797            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
798                    __func__, frameNumber, idx);
799            return BAD_VALUE;
800        }
801        if (b->release_fence != -1) {
802            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
803                    __func__, frameNumber, idx);
804            return BAD_VALUE;
805        }
806        if (b->buffer == NULL) {
807            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
808                    __func__, frameNumber, idx);
809            return BAD_VALUE;
810        }
811    }
812
813    // Validate all buffers
814    b = request->output_buffers;
815    do {
816        QCamera3Channel *channel =
817                static_cast<QCamera3Channel*>(b->stream->priv);
818        if (channel == NULL) {
819            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
820                    __func__, frameNumber, idx);
821            return BAD_VALUE;
822        }
823        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
824            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
825                    __func__, frameNumber, idx);
826            return BAD_VALUE;
827        }
828        if (b->release_fence != -1) {
829            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
830                    __func__, frameNumber, idx);
831            return BAD_VALUE;
832        }
833        if (b->buffer == NULL) {
834            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
835                    __func__, frameNumber, idx);
836            return BAD_VALUE;
837        }
838        idx++;
839        b = request->output_buffers + idx;
840    } while (idx < (ssize_t)request->num_output_buffers);
841
842    return NO_ERROR;
843}
844
845/*===========================================================================
846 * FUNCTION   : deriveMinFrameDuration
847 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
849 *              on currently configured streams.
850 *
851 * PARAMETERS : NONE
852 *
853 * RETURN     : NONE
854 *
855 *==========================================================================*/
856void QCamera3HardwareInterface::deriveMinFrameDuration()
857{
858    int32_t maxJpegDimension, maxProcessedDimension;
859
860    maxJpegDimension = 0;
861    maxProcessedDimension = 0;
862
863    // Figure out maximum jpeg, processed, and raw dimensions
864    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
865        it != mStreamInfo.end(); it++) {
866
867        // Input stream doesn't have valid stream_type
868        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
869            continue;
870
871        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
872        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
873            if (dimension > maxJpegDimension)
874                maxJpegDimension = dimension;
875        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
876            if (dimension > maxProcessedDimension)
877                maxProcessedDimension = dimension;
878        }
879    }
880
881    //Assume all jpeg dimensions are in processed dimensions.
882    if (maxJpegDimension > maxProcessedDimension)
883        maxProcessedDimension = maxJpegDimension;
884
885    //Find minimum durations for processed, jpeg, and raw
886    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
887    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
888        if (maxProcessedDimension ==
889            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
890            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
891            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
892            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
893            break;
894        }
895    }
896}
897
898/*===========================================================================
899 * FUNCTION   : getMinFrameDuration
900 *
 * DESCRIPTION: get minimum frame duration based on the currently derived
 *              minimum frame durations and the current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
907 *
908 *==========================================================================*/
909int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
910{
911    bool hasJpegStream = false;
912    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
913        const camera3_stream_t *stream = request->output_buffers[i].stream;
914        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
915            hasJpegStream = true;
916    }
917
918    if (!hasJpegStream)
919        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
920    else
921        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
922}
923
924/*===========================================================================
925 * FUNCTION   : registerStreamBuffers
926 *
927 * DESCRIPTION: Register buffers for a given stream with the HAL device.
928 *
 * PARAMETERS :
 *   @buffer_set : the stream and the set of buffers to register with it
931 *
932 * RETURN     :
933 *
934 *==========================================================================*/
935int QCamera3HardwareInterface::registerStreamBuffers(
936        const camera3_stream_buffer_set_t *buffer_set)
937{
938    int rc = 0;
939
940    pthread_mutex_lock(&mMutex);
941
942    if (buffer_set == NULL) {
943        ALOGE("%s: Invalid buffer_set parameter.", __func__);
944        pthread_mutex_unlock(&mMutex);
945        return -EINVAL;
946    }
947    if (buffer_set->stream == NULL) {
948        ALOGE("%s: Invalid stream parameter.", __func__);
949        pthread_mutex_unlock(&mMutex);
950        return -EINVAL;
951    }
952    if (buffer_set->num_buffers < 1) {
953        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
954        pthread_mutex_unlock(&mMutex);
955        return -EINVAL;
956    }
957    if (buffer_set->buffers == NULL) {
958        ALOGE("%s: Invalid buffers parameter.", __func__);
959        pthread_mutex_unlock(&mMutex);
960        return -EINVAL;
961    }
962
963    camera3_stream_t *stream = buffer_set->stream;
964    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
965
966    //set the buffer_set in the mStreamInfo array
967    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
968            it != mStreamInfo.end(); it++) {
969        if ((*it)->stream == stream) {
970            uint32_t numBuffers = buffer_set->num_buffers;
971            (*it)->buffer_set.stream = buffer_set->stream;
972            (*it)->buffer_set.num_buffers = numBuffers;
973            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
974            if ((*it)->buffer_set.buffers == NULL) {
975                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
976                pthread_mutex_unlock(&mMutex);
977                return -ENOMEM;
978            }
979            for (size_t j = 0; j < numBuffers; j++){
980                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
981            }
982            (*it)->registered = 1;
983        }
984    }
985    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
986    if (rc < 0) {
987        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
988        pthread_mutex_unlock(&mMutex);
989        return -ENODEV;
990    }
991
992    pthread_mutex_unlock(&mMutex);
993    return NO_ERROR;
994}
995
996/*===========================================================================
997 * FUNCTION   : processCaptureRequest
998 *
999 * DESCRIPTION: process a capture request from camera service
1000 *
1001 * PARAMETERS :
1002 *   @request : request from framework to process
1003 *
1004 * RETURN     :
1005 *
1006 *==========================================================================*/
1007int QCamera3HardwareInterface::processCaptureRequest(
1008                    camera3_capture_request_t *request)
1009{
1010    int rc = NO_ERROR;
1011    int32_t request_id;
1012    CameraMetadata meta;
1013    MetadataBufferInfo reproc_meta;
1014    int queueMetadata = 0;
1015
1016    pthread_mutex_lock(&mMutex);
1017
1018    rc = validateCaptureRequest(request);
1019    if (rc != NO_ERROR) {
1020        ALOGE("%s: incoming request is not valid", __func__);
1021        pthread_mutex_unlock(&mMutex);
1022        return rc;
1023    }
1024
1025    meta = request->settings;
1026
1027    // For first capture request, send capture intent, and
1028    // stream on all streams
1029    if (mFirstRequest) {
1030
1031        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1032            int32_t hal_version = CAM_HAL_V3;
1033            uint8_t captureIntent =
1034                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1035
1036            memset(mParameters, 0, sizeof(parm_buffer_t));
1037            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1038            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1039                sizeof(hal_version), &hal_version);
1040            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1041                sizeof(captureIntent), &captureIntent);
1042            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1043                mParameters);
1044        }
1045
1046        mMetadataChannel->start();
1047        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1048            it != mStreamInfo.end(); it++) {
1049            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1050            channel->start();
1051        }
1052    }
1053
1054    uint32_t frameNumber = request->frame_number;
1055    uint32_t streamTypeMask = 0;
1056
1057    if (meta.exists(ANDROID_REQUEST_ID)) {
1058        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1059        mCurrentRequestId = request_id;
1060        ALOGV("%s: Received request with id: %d",__func__, request_id);
1061    } else if (mFirstRequest || mCurrentRequestId == -1){
1062        ALOGE("%s: Unable to find request id field, \
1063                & no previous id available", __func__);
1064        return NAME_NOT_FOUND;
1065    } else {
1066        ALOGV("%s: Re-using old request id", __func__);
1067        request_id = mCurrentRequestId;
1068    }
1069
1070    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1071                                    __func__, __LINE__,
1072                                    request->num_output_buffers,
1073                                    request->input_buffer,
1074                                    frameNumber);
1075    // Acquire all request buffers first
1076    int blob_request = 0;
1077    for (size_t i = 0; i < request->num_output_buffers; i++) {
1078        const camera3_stream_buffer_t& output = request->output_buffers[i];
1079        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1080        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1081
1082        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1083        //Call function to store local copy of jpeg data for encode params.
1084            blob_request = 1;
1085            rc = getJpegSettings(request->settings);
1086            if (rc < 0) {
1087                ALOGE("%s: failed to get jpeg parameters", __func__);
1088                pthread_mutex_unlock(&mMutex);
1089                return rc;
1090            }
1091        }
1092
1093        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1094        if (rc != OK) {
1095            ALOGE("%s: fence wait failed %d", __func__, rc);
1096            pthread_mutex_unlock(&mMutex);
1097            return rc;
1098        }
1099        streamTypeMask |= channel->getStreamTypeMask();
1100    }
1101
1102    rc = setFrameParameters(request, streamTypeMask);
1103    if (rc < 0) {
1104        ALOGE("%s: fail to set frame parameters", __func__);
1105        pthread_mutex_unlock(&mMutex);
1106        return rc;
1107    }
1108
1109    /* Update pending request list and pending buffers map */
1110    PendingRequestInfo pendingRequest;
1111    pendingRequest.frame_number = frameNumber;
1112    pendingRequest.num_buffers = request->num_output_buffers;
1113    pendingRequest.request_id = request_id;
1114    pendingRequest.blob_request = blob_request;
1115    if (blob_request)
1116        pendingRequest.input_jpeg_settings = *mJpegSettings;
1117    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1118
1119    for (size_t i = 0; i < request->num_output_buffers; i++) {
1120        RequestedBufferInfo requestedBuf;
1121        requestedBuf.stream = request->output_buffers[i].stream;
1122        requestedBuf.buffer = NULL;
1123        pendingRequest.buffers.push_back(requestedBuf);
1124
1125        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1126    }
1127    mPendingRequestsList.push_back(pendingRequest);
1128
1129    // Notify metadata channel we receive a request
1130    mMetadataChannel->request(NULL, frameNumber);
1131
1132    // Call request on other streams
1133    for (size_t i = 0; i < request->num_output_buffers; i++) {
1134        const camera3_stream_buffer_t& output = request->output_buffers[i];
1135        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1136        mm_camera_buf_def_t *pInputBuffer = NULL;
1137
1138        if (channel == NULL) {
1139            ALOGE("%s: invalid channel pointer for stream", __func__);
1140            continue;
1141        }
1142
1143        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1144            QCamera3RegularChannel* inputChannel = NULL;
1145            if(request->input_buffer != NULL){
1146                //Try to get the internal format
1147                inputChannel = (QCamera3RegularChannel*)
1148                    request->input_buffer->stream->priv;
1149                if(inputChannel == NULL ){
1150                    ALOGE("%s: failed to get input channel handle", __func__);
1151                } else {
1152                    pInputBuffer =
1153                        inputChannel->getInternalFormatBuffer(
1154                                request->input_buffer->buffer);
1155                    ALOGD("%s: Input buffer dump",__func__);
1156                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1157                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1158                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1159                    ALOGD("Handle:%p", request->input_buffer->buffer);
1160                    //TODO: need to get corresponding metadata and send it to pproc
1161                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1162                         m != mStoredMetadataList.end(); m++) {
1163                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1164                            reproc_meta.meta_buf = m->meta_buf;
1165                            queueMetadata = 1;
1166                            break;
1167                        }
1168                    }
1169                }
1170            }
1171            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1172                            pInputBuffer,(QCamera3Channel*)inputChannel);
1173            if (queueMetadata) {
1174                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1175            }
1176        } else {
1177            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1178                __LINE__, output.buffer, frameNumber);
1179            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1180                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1181                     m != mStoredMetadataList.end(); m++) {
1182                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1183                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1184                            mMetadataChannel->bufDone(m->meta_buf);
1185                            free(m->meta_buf);
1186                            m = mStoredMetadataList.erase(m);
1187                            break;
1188                        }
1189                   }
1190                }
1191            }
1192            rc = channel->request(output.buffer, frameNumber);
1193        }
1194        if (rc < 0)
1195            ALOGE("%s: request failed", __func__);
1196    }
1197
1198    mFirstRequest = false;
1199    // Added a timed condition wait
1200    struct timespec ts;
1201    uint8_t isValidTimeout = 1;
1202    rc = clock_gettime(CLOCK_REALTIME, &ts);
1203    if (rc < 0) {
1204        isValidTimeout = 0;
1205        ALOGE("%s: Error reading the real time clock!!", __func__);
1206    }
1207    else {
1208        // Make timeout as 5 sec for request to be honored
1209        ts.tv_sec += 5;
1210    }
1211    //Block on conditional variable
1212    mPendingRequest = 1;
1213    while (mPendingRequest == 1) {
1214        if (!isValidTimeout) {
1215            ALOGV("%s: Blocking on conditional wait", __func__);
1216            pthread_cond_wait(&mRequestCond, &mMutex);
1217        }
1218        else {
1219            ALOGV("%s: Blocking on timed conditional wait", __func__);
1220            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1221            if (rc == ETIMEDOUT) {
1222                rc = -ENODEV;
1223                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1224                break;
1225            }
1226        }
1227        ALOGV("%s: Unblocked", __func__);
1228    }
1229
1230    pthread_mutex_unlock(&mMutex);
1231
1232    return rc;
1233}
1234
1235/*===========================================================================
1236 * FUNCTION   : getMetadataVendorTagOps
1237 *
1238 * DESCRIPTION:
1239 *
1240 * PARAMETERS :
1241 *
1242 *
1243 * RETURN     :
1244 *==========================================================================*/
1245void QCamera3HardwareInterface::getMetadataVendorTagOps(
1246                    vendor_tag_query_ops_t* /*ops*/)
1247{
1248    /* Enable locks when we eventually add Vendor Tags */
1249    /*
1250    pthread_mutex_lock(&mMutex);
1251
1252    pthread_mutex_unlock(&mMutex);
1253    */
1254    return;
1255}
1256
1257/*===========================================================================
1258 * FUNCTION   : dump
1259 *
1260 * DESCRIPTION:
1261 *
1262 * PARAMETERS :
1263 *
1264 *
1265 * RETURN     :
1266 *==========================================================================*/
1267void QCamera3HardwareInterface::dump(int /*fd*/)
1268{
1269    /*Enable lock when we implement this function*/
1270    /*
1271    pthread_mutex_lock(&mMutex);
1272
1273    pthread_mutex_unlock(&mMutex);
1274    */
1275    return;
1276}
1277
1278/*===========================================================================
1279 * FUNCTION   : flush
1280 *
1281 * DESCRIPTION:
1282 *
1283 * PARAMETERS :
1284 *
1285 *
1286 * RETURN     :
1287 *==========================================================================*/
1288int QCamera3HardwareInterface::flush()
1289{
1290    /*Enable lock when we implement this function*/
1291    /*
1292    pthread_mutex_lock(&mMutex);
1293
1294    pthread_mutex_unlock(&mMutex);
1295    */
1296    return 0;
1297}
1298
1299/*===========================================================================
1300 * FUNCTION   : captureResultCb
1301 *
1302 * DESCRIPTION: Callback handler for all capture result
1303 *              (streams, as well as metadata)
1304 *
1305 * PARAMETERS :
1306 *   @metadata : metadata information
1307 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1308 *               NULL if metadata.
1309 *
1310 * RETURN     : NONE
1311 *==========================================================================*/
1312void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1313                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1314{
1315    pthread_mutex_lock(&mMutex);
1316
1317    if (metadata_buf) {
1318        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1319        int32_t frame_number_valid = *(int32_t *)
1320            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1321        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1322            CAM_INTF_META_PENDING_REQUESTS, metadata);
1323        uint32_t frame_number = *(uint32_t *)
1324            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1325        const struct timeval *tv = (const struct timeval *)
1326            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1327        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1328            tv->tv_usec * NSEC_PER_USEC;
1329        cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
1330            POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
1331
1332        if (!frame_number_valid) {
1333            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1334            mMetadataChannel->bufDone(metadata_buf);
1335            free(metadata_buf);
1336            goto done_metadata;
1337        }
1338        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1339                frame_number, capture_time);
1340
1341        // Go through the pending requests info and send shutter/results to frameworks
1342        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1343                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1344            camera3_capture_result_t result;
1345            camera3_notify_msg_t notify_msg;
1346            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1347
1348            // Flush out all entries with less or equal frame numbers.
1349
1350            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1351            //Right now it's the same as metadata timestamp
1352
1353            //TODO: When there is metadata drop, how do we derive the timestamp of
1354            //dropped frames? For now, we fake the dropped timestamp by substracting
1355            //from the reported timestamp
1356            nsecs_t current_capture_time = capture_time -
1357                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1358
1359            // Send shutter notify to frameworks
1360            notify_msg.type = CAMERA3_MSG_SHUTTER;
1361            notify_msg.message.shutter.frame_number = i->frame_number;
1362            notify_msg.message.shutter.timestamp = current_capture_time;
1363            mCallbackOps->notify(mCallbackOps, &notify_msg);
1364            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1365                    i->frame_number, capture_time);
1366
1367            // Check whether any stream buffer corresponding to this is dropped or not
1368            // If dropped, then send the ERROR_BUFFER for the corresponding stream
1369            if (cam_frame_drop.frame_dropped) {
1370                camera3_notify_msg_t notify_msg;
1371                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1372                        j != i->buffers.end(); j++) {
1373                    QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1374                    uint32_t streamTypeMask = channel->getStreamTypeMask();
1375                    if (streamTypeMask & cam_frame_drop.stream_type_mask) {
1376                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
1377                        ALOGV("%s: Start of reporting error frame#=%d, streamMask=%d",
1378                               __func__, i->frame_number, streamTypeMask);
1379                        notify_msg.type = CAMERA3_MSG_ERROR;
1380                        notify_msg.message.error.frame_number = i->frame_number;
1381                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
1382                        notify_msg.message.error.error_stream = j->stream;
1383                        mCallbackOps->notify(mCallbackOps, &notify_msg);
1384                        ALOGV("%s: End of reporting error frame#=%d, streamMask=%d",
1385                               __func__, i->frame_number, streamTypeMask);
1386                        PendingFrameDropInfo PendingFrameDrop;
1387                        PendingFrameDrop.frame_number=i->frame_number;
1388                        PendingFrameDrop.stream_type_mask = cam_frame_drop.stream_type_mask;
1389                        // Add the Frame drop info to mPendingFrameDropList
1390                        mPendingFrameDropList.push_back(PendingFrameDrop);
1391                    }
1392                }
1393            }
1394
1395            // Send empty metadata with already filled buffers for dropped metadata
1396            // and send valid metadata with already filled buffers for current metadata
1397            if (i->frame_number < frame_number) {
1398                CameraMetadata dummyMetadata;
1399                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1400                        &current_capture_time, 1);
1401                dummyMetadata.update(ANDROID_REQUEST_ID,
1402                        &(i->request_id), 1);
1403                result.result = dummyMetadata.release();
1404            } else {
1405                result.result = translateCbMetadataToResultMetadata(metadata,
1406                        current_capture_time, i->request_id, i->blob_request,
1407                        &(i->input_jpeg_settings));
1408                if (mIsZslMode) {
1409                   int found_metadata = 0;
1410                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1411                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1412                        j != i->buffers.end(); j++) {
1413                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1414                         //check if corresp. zsl already exists in the stored metadata list
1415                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1416                               m != mStoredMetadataList.begin(); m++) {
1417                            if (m->frame_number == frame_number) {
1418                               m->meta_buf = metadata_buf;
1419                               found_metadata = 1;
1420                               break;
1421                            }
1422                         }
1423                         if (!found_metadata) {
1424                            MetadataBufferInfo store_meta_info;
1425                            store_meta_info.meta_buf = metadata_buf;
1426                            store_meta_info.frame_number = frame_number;
1427                            mStoredMetadataList.push_back(store_meta_info);
1428                            found_metadata = 1;
1429                         }
1430                      }
1431                   }
1432                   if (!found_metadata) {
1433                       if (!i->input_buffer_present && i->blob_request) {
1434                          //livesnapshot or fallback non-zsl snapshot case
1435                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1436                                j != i->buffers.end(); j++){
1437                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1438                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1439                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1440                                 break;
1441                              }
1442                         }
1443                       } else {
1444                            //return the metadata immediately
1445                            mMetadataChannel->bufDone(metadata_buf);
1446                            free(metadata_buf);
1447                       }
1448                   }
1449               } else if (!mIsZslMode && i->blob_request) {
1450                   //If it is a blob request then send the metadata to the picture channel
1451                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1452               } else {
1453                   // Return metadata buffer
1454                   mMetadataChannel->bufDone(metadata_buf);
1455                   free(metadata_buf);
1456               }
1457
1458            }
1459            if (!result.result) {
1460                ALOGE("%s: metadata is NULL", __func__);
1461            }
1462            result.frame_number = i->frame_number;
1463            result.num_output_buffers = 0;
1464            result.output_buffers = NULL;
1465            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1466                    j != i->buffers.end(); j++) {
1467                if (j->buffer) {
1468                    result.num_output_buffers++;
1469                }
1470            }
1471
1472            if (result.num_output_buffers > 0) {
1473                camera3_stream_buffer_t *result_buffers =
1474                    new camera3_stream_buffer_t[result.num_output_buffers];
1475                if (!result_buffers) {
1476                    ALOGE("%s: Fatal error: out of memory", __func__);
1477                }
1478                size_t result_buffers_idx = 0;
1479                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1480                     j != i->buffers.end(); j++) {
1481                     if (j->buffer) {
1482                         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1483                              m != mPendingFrameDropList.end(); m++) {
1484                              QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
1485                              uint32_t streamTypeMask = channel->getStreamTypeMask();
1486                              if((m->stream_type_mask & streamTypeMask) &&
1487                                  (m->frame_number==frame_number)) {
1488                                  j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1489                                  ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
1490                                        __func__, frame_number, streamTypeMask);
1491                                  m = mPendingFrameDropList.erase(m);
1492                                  break;
1493                              }
1494                         }
1495                         result_buffers[result_buffers_idx++] = *(j->buffer);
1496                         free(j->buffer);
1497                         j->buffer = NULL;
1498                         mPendingBuffersMap.editValueFor(j->stream)--;
1499                    }
1500                }
1501                result.output_buffers = result_buffers;
1502
1503                mCallbackOps->process_capture_result(mCallbackOps, &result);
1504                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1505                        __func__, result.frame_number, current_capture_time);
1506                free_camera_metadata((camera_metadata_t *)result.result);
1507                delete[] result_buffers;
1508            } else {
1509                mCallbackOps->process_capture_result(mCallbackOps, &result);
1510                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1511                        __func__, result.frame_number, current_capture_time);
1512                free_camera_metadata((camera_metadata_t *)result.result);
1513            }
1514            // erase the element from the list
1515            i = mPendingRequestsList.erase(i);
1516        }
1517
1518
1519done_metadata:
1520        bool max_buffers_dequeued = false;
1521        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1522            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1523            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1524            if (queued_buffers == stream->max_buffers) {
1525                max_buffers_dequeued = true;
1526                break;
1527            }
1528        }
1529        if (!max_buffers_dequeued && !pending_requests) {
1530            // Unblock process_capture_request
1531            mPendingRequest = 0;
1532            pthread_cond_signal(&mRequestCond);
1533        }
1534    } else {
1535        // If the frame number doesn't exist in the pending request list,
1536        // directly send the buffer to the frameworks, and update pending buffers map
1537        // Otherwise, book-keep the buffer.
1538        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1539        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1540            i++;
1541        }
1542        if (i == mPendingRequestsList.end()) {
1543            // Verify all pending requests frame_numbers are greater
1544            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1545                    j != mPendingRequestsList.end(); j++) {
1546                if (j->frame_number < frame_number) {
1547                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1548                            __func__, j->frame_number, frame_number);
1549                }
1550            }
1551            camera3_capture_result_t result;
1552            result.result = NULL;
1553            result.frame_number = frame_number;
1554            result.num_output_buffers = 1;
1555            for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1556                  m != mPendingFrameDropList.end(); m++) {
1557                QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
1558                uint32_t streamTypeMask = channel->getStreamTypeMask();
1559                if((m->stream_type_mask & streamTypeMask) &&
1560                    (m->frame_number==frame_number) ) {
1561                    buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1562                    ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
1563                            __func__, frame_number, streamTypeMask);
1564                    m = mPendingFrameDropList.erase(m);
1565                    break;
1566                }
1567            }
1568            result.output_buffers = buffer;
1569            ALOGV("%s: result frame_number = %d, buffer = %p",
1570                    __func__, frame_number, buffer);
1571            mPendingBuffersMap.editValueFor(buffer->stream)--;
1572            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1573                int found = 0;
1574                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1575                      k != mStoredMetadataList.end(); k++) {
1576                    if (k->frame_number == frame_number) {
1577                        k->zsl_buf_hdl = buffer->buffer;
1578                        found = 1;
1579                        break;
1580                    }
1581                }
1582                if (!found) {
1583                   MetadataBufferInfo meta_info;
1584                   meta_info.frame_number = frame_number;
1585                   meta_info.zsl_buf_hdl = buffer->buffer;
1586                   mStoredMetadataList.push_back(meta_info);
1587                }
1588            }
1589            mCallbackOps->process_capture_result(mCallbackOps, &result);
1590        } else {
1591            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1592                    j != i->buffers.end(); j++) {
1593                if (j->stream == buffer->stream) {
1594                    if (j->buffer != NULL) {
1595                        ALOGE("%s: Error: buffer is already set", __func__);
1596                    } else {
1597                        j->buffer = (camera3_stream_buffer_t *)malloc(
1598                                sizeof(camera3_stream_buffer_t));
1599                        *(j->buffer) = *buffer;
1600                        ALOGV("%s: cache buffer %p at result frame_number %d",
1601                                __func__, buffer, frame_number);
1602                    }
1603                }
1604            }
1605        }
1606    }
1607    pthread_mutex_unlock(&mMutex);
1608    return;
1609}
1610
1611/*===========================================================================
1612 * FUNCTION   : translateCbMetadataToResultMetadata
1613 *
1614 * DESCRIPTION:
1615 *
1616 * PARAMETERS :
1617 *   @metadata : metadata information from callback
1618 *
1619 * RETURN     : camera_metadata_t*
1620 *              metadata in a format specified by fwk
1621 *==========================================================================*/
1622camera_metadata_t*
1623QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1624                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1625                                 int32_t request_id, int32_t BlobRequest,
1626                                 jpeg_settings_t* inputjpegsettings)
1627{
1628    CameraMetadata camMetadata;
1629    camera_metadata_t* resultMetadata;
1630
1631    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1632    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1633
1634    // Update the JPEG related info
1635    if (BlobRequest) {
1636        camMetadata.update(ANDROID_JPEG_ORIENTATION, &(inputjpegsettings->jpeg_orientation), 1);
1637        camMetadata.update(ANDROID_JPEG_QUALITY, &(inputjpegsettings->jpeg_quality), 1);
1638
1639        int32_t thumbnailSizeTable[2];
1640        thumbnailSizeTable[0] = inputjpegsettings->thumbnail_size.width;
1641        thumbnailSizeTable[1] = inputjpegsettings->thumbnail_size.height;
1642        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSizeTable, 2);
1643        ALOGV("%s: Orien=%d, quality=%d wid=%d, height=%d", __func__, inputjpegsettings->jpeg_orientation,
1644               inputjpegsettings->jpeg_quality,thumbnailSizeTable[0], thumbnailSizeTable[1]);
1645
1646        if (inputjpegsettings->gps_coordinates[0]) {
1647            double gpsCoordinates[3];
1648            gpsCoordinates[0]=*(inputjpegsettings->gps_coordinates[0]);
1649            gpsCoordinates[1]=*(inputjpegsettings->gps_coordinates[1]);
1650            gpsCoordinates[2]=*(inputjpegsettings->gps_coordinates[2]);
1651            camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
1652            ALOGV("%s: gpsCoordinates[0]=%f, 1=%f 2=%f", __func__, gpsCoordinates[0],
1653                 gpsCoordinates[1],gpsCoordinates[2]);
1654        }
1655
1656        if (inputjpegsettings->gps_timestamp) {
1657            camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, inputjpegsettings->gps_timestamp, 1);
1658            ALOGV("%s: gps_timestamp=%lld", __func__, *(inputjpegsettings->gps_timestamp));
1659        }
1660
1661        String8 str(inputjpegsettings->gps_processing_method);
1662        if (strlen(mJpegSettings->gps_processing_method) > 0) {
1663            camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
1664        }
1665    }
1666    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1667    uint8_t next_entry;
1668    while (curr_entry != CAM_INTF_PARM_MAX) {
1669       switch (curr_entry) {
1670         case CAM_INTF_META_FACE_DETECTION:{
1671             cam_face_detection_data_t *faceDetectionInfo =
1672                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1673             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1674             int32_t faceIds[numFaces];
1675             uint8_t faceScores[numFaces];
1676             int32_t faceRectangles[numFaces * 4];
1677             int32_t faceLandmarks[numFaces * 6];
1678             int j = 0, k = 0;
1679             for (int i = 0; i < numFaces; i++) {
1680                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1681                 faceScores[i] = faceDetectionInfo->faces[i].score;
1682                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1683                         faceRectangles+j, -1);
1684                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1685                 j+= 4;
1686                 k+= 6;
1687             }
1688             if (numFaces > 0) {
1689                 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1690                 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1691                 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1692                     faceRectangles, numFaces*4);
1693                 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1694                     faceLandmarks, numFaces*6);
1695             }
1696            break;
1697            }
1698         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1699             uint8_t  *color_correct_mode =
1700                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1701             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1702             break;
1703          }
1704         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1705             int32_t  *ae_precapture_id =
1706                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1707             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1708             break;
1709          }
1710         case CAM_INTF_META_AEC_ROI: {
1711            cam_area_t  *hAeRegions =
1712                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1713             int32_t aeRegions[5];
1714             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1715             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1716             break;
1717          }
1718          case CAM_INTF_META_AEC_STATE:{
1719             uint8_t *ae_state =
1720                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1721             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1722             break;
1723          }
1724          case CAM_INTF_PARM_FOCUS_MODE:{
1725             uint8_t  *focusMode =
1726                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1727             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1728                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1729             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1730             break;
1731          }
1732          case CAM_INTF_META_AF_ROI:{
1733             /*af regions*/
1734             cam_area_t  *hAfRegions =
1735                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1736             int32_t afRegions[5];
1737             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1738             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1739             break;
1740          }
1741          case CAM_INTF_META_AF_STATE: {
1742             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1743             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1744             break;
1745          }
1746          case CAM_INTF_META_AF_TRIGGER_ID: {
1747             int32_t  *afTriggerId =
1748                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1749             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1750             break;
1751          }
1752          case CAM_INTF_PARM_WHITE_BALANCE: {
1753               uint8_t  *whiteBalance =
1754                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1755               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1756                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1757                   *whiteBalance);
1758               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1759               break;
1760          }
1761          case CAM_INTF_META_AWB_REGIONS: {
1762             /*awb regions*/
1763             cam_area_t  *hAwbRegions =
1764                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1765             int32_t awbRegions[5];
1766             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1767             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1768             break;
1769          }
1770          case CAM_INTF_META_AWB_STATE: {
1771             uint8_t  *whiteBalanceState =
1772                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1773             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1774             break;
1775          }
1776          case CAM_INTF_META_MODE: {
1777             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1778             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1779             break;
1780          }
1781          case CAM_INTF_META_EDGE_MODE: {
1782             cam_edge_application_t  *edgeApplication =
1783                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1784             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
1785             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
1786             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
1787             break;
1788          }
1789          case CAM_INTF_META_FLASH_POWER: {
1790             uint8_t  *flashPower =
1791                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1792             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1793             break;
1794          }
1795          case CAM_INTF_META_FLASH_FIRING_TIME: {
1796             int64_t  *flashFiringTime =
1797                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1798             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1799             break;
1800          }
1801          case CAM_INTF_META_FLASH_STATE: {
1802             uint8_t  *flashState =
1803                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1804             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1805             break;
1806          }
1807          case CAM_INTF_META_FLASH_MODE:{
1808             uint8_t *flashMode = (uint8_t*)
1809                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
1810             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
1811             break;
1812          }
1813          case CAM_INTF_META_HOTPIXEL_MODE: {
1814              uint8_t  *hotPixelMode =
1815                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1816              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1817              break;
1818          }
1819          case CAM_INTF_META_LENS_APERTURE:{
1820             float  *lensAperture =
1821                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1822             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1823             break;
1824          }
1825          case CAM_INTF_META_LENS_FILTERDENSITY: {
1826             float  *filterDensity =
1827                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1828             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1829             break;
1830          }
1831          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1832             float  *focalLength =
1833                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1834             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1835             break;
1836          }
1837          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1838             float  *focusDistance =
1839                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1840             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1841             break;
1842          }
1843          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1844             float  *focusRange =
1845                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1846             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
1847             break;
1848          }
1849          case CAM_INTF_META_LENS_STATE: {
1850             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
1851             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
1852             break;
1853          }
1854          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1855             uint8_t  *opticalStab =
1856                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1857             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1858             break;
1859          }
1860          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1861             uint8_t  *noiseRedMode =
1862                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1863             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1864             break;
1865          }
1866          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
1867             uint8_t  *noiseRedStrength =
1868                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
1869             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
1870             break;
1871          }
1872          case CAM_INTF_META_SCALER_CROP_REGION: {
1873             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1874             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1875             int32_t scalerCropRegion[4];
1876             scalerCropRegion[0] = hScalerCropRegion->left;
1877             scalerCropRegion[1] = hScalerCropRegion->top;
1878             scalerCropRegion[2] = hScalerCropRegion->width;
1879             scalerCropRegion[3] = hScalerCropRegion->height;
1880             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1881             break;
1882          }
1883          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1884             int64_t  *sensorExpTime =
1885                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1886             mMetadataResponse.exposure_time = *sensorExpTime;
1887             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1888             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1889             break;
1890          }
1891          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1892             int64_t  *sensorFameDuration =
1893                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1894             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1895             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1896             break;
1897          }
1898          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1899             int32_t  *sensorSensitivity =
1900                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1901             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1902             mMetadataResponse.iso_speed = *sensorSensitivity;
1903             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1904             break;
1905          }
1906          case CAM_INTF_META_SHADING_MODE: {
1907             uint8_t  *shadingMode =
1908                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1909             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1910             break;
1911          }
1912          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1913             uint8_t  *faceDetectMode =
1914                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1915             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1916                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1917                                                        *faceDetectMode);
1918             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1919             break;
1920          }
1921          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1922             uint8_t  *histogramMode =
1923                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1924             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1925             break;
1926          }
1927          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1928               uint8_t  *sharpnessMapMode =
1929                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1930               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1931                                  sharpnessMapMode, 1);
1932               break;
1933           }
1934          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1935               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1936               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1937               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1938                                  (int32_t*)sharpnessMap->sharpness,
1939                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1940               break;
1941          }
1942          case CAM_INTF_META_LENS_SHADING_MAP: {
1943               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
1944               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
1945               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
1946               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
1947               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
1948                                  (float*)lensShadingMap->lens_shading,
1949                                  4*map_width*map_height);
1950               break;
1951          }
1952          case CAM_INTF_META_TONEMAP_CURVES:{
1953             //Populate CAM_INTF_META_TONEMAP_CURVES
1954             /* ch0 = G, ch 1 = B, ch 2 = R*/
1955             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
1956             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
1957             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
1958                                (float*)tonemap->curves[0].tonemap_points,
1959                                tonemap->tonemap_points_cnt * 2);
1960
1961             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
1962                                (float*)tonemap->curves[1].tonemap_points,
1963                                tonemap->tonemap_points_cnt * 2);
1964
1965             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
1966                                (float*)tonemap->curves[2].tonemap_points,
1967                                tonemap->tonemap_points_cnt * 2);
1968             break;
1969          }
1970          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
1971             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
1972             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
1973             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
1974             break;
1975          }
1976          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
1977              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
1978              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
1979              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
1980                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
1981              break;
1982          }
1983          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
1984             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
1985             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
1986             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
1987                       predColorCorrectionGains->gains, 4);
1988             break;
1989          }
1990          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
1991             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
1992                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
1993             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
1994                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
1995             break;
1996
1997          }
1998          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
1999             uint8_t *blackLevelLock = (uint8_t*)
2000               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2001             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2002             break;
2003          }
2004          case CAM_INTF_META_SCENE_FLICKER:{
2005             uint8_t *sceneFlicker = (uint8_t*)
2006             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2007             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2008             break;
2009          }
2010          case CAM_INTF_PARM_LED_MODE:
2011             break;
2012          case CAM_INTF_PARM_EFFECT: {
2013             uint8_t *effectMode = (uint8_t*)
2014                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2015             uint8_t fwk_effectMode = lookupFwkName(EFFECT_MODES_MAP,
2016                                                    sizeof(EFFECT_MODES_MAP),
2017                                                    *effectMode);
2018             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2019             break;
2020          }
2021          default:
2022             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2023                   __func__, curr_entry);
2024             break;
2025       }
2026       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2027       curr_entry = next_entry;
2028    }
2029    resultMetadata = camMetadata.release();
2030    return resultMetadata;
2031}
2032
2033/*===========================================================================
2034 * FUNCTION   : convertToRegions
2035 *
2036 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2037 *
2038 * PARAMETERS :
2039 *   @rect   : cam_rect_t struct to convert
2040 *   @region : int32_t destination array
2041 *   @weight : if we are converting from cam_area_t, weight is valid
2042 *             else weight = -1
2043 *
2044 *==========================================================================*/
2045void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2046    region[0] = rect.left;
2047    region[1] = rect.top;
2048    region[2] = rect.left + rect.width;
2049    region[3] = rect.top + rect.height;
2050    if (weight > -1) {
2051        region[4] = weight;
2052    }
2053}
2054
2055/*===========================================================================
2056 * FUNCTION   : convertFromRegions
2057 *
2058 * DESCRIPTION: helper method to convert from array to cam_rect_t
2059 *
2060 * PARAMETERS :
2061 *   @rect   : cam_rect_t struct to convert
2062 *   @region : int32_t destination array
2063 *   @weight : if we are converting from cam_area_t, weight is valid
2064 *             else weight = -1
2065 *
2066 *==========================================================================*/
2067void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2068                                                   const camera_metadata_t *settings,
2069                                                   uint32_t tag){
2070    CameraMetadata frame_settings;
2071    frame_settings = settings;
2072    int32_t x_min = frame_settings.find(tag).data.i32[0];
2073    int32_t y_min = frame_settings.find(tag).data.i32[1];
2074    int32_t x_max = frame_settings.find(tag).data.i32[2];
2075    int32_t y_max = frame_settings.find(tag).data.i32[3];
2076    roi->weight = frame_settings.find(tag).data.i32[4];
2077    roi->rect.left = x_min;
2078    roi->rect.top = y_min;
2079    roi->rect.width = x_max - x_min;
2080    roi->rect.height = y_max - y_min;
2081}
2082
2083/*===========================================================================
2084 * FUNCTION   : resetIfNeededROI
2085 *
2086 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2087 *              crop region
2088 *
2089 * PARAMETERS :
2090 *   @roi       : cam_area_t struct to resize
2091 *   @scalerCropRegion : cam_crop_region_t region to compare against
2092 *
2093 *
2094 *==========================================================================*/
2095bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2096                                                 const cam_crop_region_t* scalerCropRegion)
2097{
2098    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2099    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2100    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2101    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2102    if ((roi_x_max < scalerCropRegion->left) ||
2103        (roi_y_max < scalerCropRegion->top)  ||
2104        (roi->rect.left > crop_x_max) ||
2105        (roi->rect.top > crop_y_max)){
2106        return false;
2107    }
2108    if (roi->rect.left < scalerCropRegion->left) {
2109        roi->rect.left = scalerCropRegion->left;
2110    }
2111    if (roi->rect.top < scalerCropRegion->top) {
2112        roi->rect.top = scalerCropRegion->top;
2113    }
2114    if (roi_x_max > crop_x_max) {
2115        roi_x_max = crop_x_max;
2116    }
2117    if (roi_y_max > crop_y_max) {
2118        roi_y_max = crop_y_max;
2119    }
2120    roi->rect.width = roi_x_max - roi->rect.left;
2121    roi->rect.height = roi_y_max - roi->rect.top;
2122    return true;
2123}
2124
2125/*===========================================================================
2126 * FUNCTION   : convertLandmarks
2127 *
2128 * DESCRIPTION: helper method to extract the landmarks from face detection info
2129 *
2130 * PARAMETERS :
2131 *   @face   : cam_rect_t struct to convert
2132 *   @landmarks : int32_t destination array
2133 *
2134 *
2135 *==========================================================================*/
2136void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2137{
2138    landmarks[0] = face.left_eye_center.x;
2139    landmarks[1] = face.left_eye_center.y;
2140    landmarks[2] = face.right_eye_center.x;
2141    landmarks[3] = face.right_eye_center.y;
2142    landmarks[4] = face.mouth_center.x;
2143    landmarks[5] = face.mouth_center.y;
2144}
2145
2146#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2147/*===========================================================================
2148 * FUNCTION   : initCapabilities
2149 *
2150 * DESCRIPTION: initialize camera capabilities in static data struct
2151 *
2152 * PARAMETERS :
2153 *   @cameraId  : camera Id
2154 *
2155 * RETURN     : int32_t type of status
2156 *              NO_ERROR  -- success
 *              non-zero failure code
2158 *==========================================================================*/
2159int QCamera3HardwareInterface::initCapabilities(int cameraId)
2160{
2161    int rc = 0;
2162    mm_camera_vtbl_t *cameraHandle = NULL;
2163    QCamera3HeapMemory *capabilityHeap = NULL;
2164
2165    cameraHandle = camera_open(cameraId);
2166    if (!cameraHandle) {
2167        ALOGE("%s: camera_open failed", __func__);
2168        rc = -1;
2169        goto open_failed;
2170    }
2171
2172    capabilityHeap = new QCamera3HeapMemory();
2173    if (capabilityHeap == NULL) {
2174        ALOGE("%s: creation of capabilityHeap failed", __func__);
2175        goto heap_creation_failed;
2176    }
2177    /* Allocate memory for capability buffer */
2178    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2179    if(rc != OK) {
2180        ALOGE("%s: No memory for cappability", __func__);
2181        goto allocate_failed;
2182    }
2183
2184    /* Map memory for capability buffer */
2185    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2186    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2187                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2188                                capabilityHeap->getFd(0),
2189                                sizeof(cam_capability_t));
2190    if(rc < 0) {
2191        ALOGE("%s: failed to map capability buffer", __func__);
2192        goto map_failed;
2193    }
2194
2195    /* Query Capability */
2196    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2197    if(rc < 0) {
2198        ALOGE("%s: failed to query capability",__func__);
2199        goto query_failed;
2200    }
2201    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2202    if (!gCamCapability[cameraId]) {
2203        ALOGE("%s: out of memory", __func__);
2204        goto query_failed;
2205    }
2206    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2207                                        sizeof(cam_capability_t));
2208    rc = 0;
2209
2210query_failed:
2211    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2212                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2213map_failed:
2214    capabilityHeap->deallocate();
2215allocate_failed:
2216    delete capabilityHeap;
2217heap_creation_failed:
2218    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2219    cameraHandle = NULL;
2220open_failed:
2221    return rc;
2222}
2223
2224/*===========================================================================
2225 * FUNCTION   : initParameters
2226 *
2227 * DESCRIPTION: initialize camera parameters
2228 *
2229 * PARAMETERS :
2230 *
2231 * RETURN     : int32_t type of status
2232 *              NO_ERROR  -- success
 *              non-zero failure code
2234 *==========================================================================*/
2235int QCamera3HardwareInterface::initParameters()
2236{
2237    int rc = 0;
2238
2239    //Allocate Set Param Buffer
2240    mParamHeap = new QCamera3HeapMemory();
2241    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2242    if(rc != OK) {
2243        rc = NO_MEMORY;
2244        ALOGE("Failed to allocate SETPARM Heap memory");
2245        delete mParamHeap;
2246        mParamHeap = NULL;
2247        return rc;
2248    }
2249
2250    //Map memory for parameters buffer
2251    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2252            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2253            mParamHeap->getFd(0),
2254            sizeof(parm_buffer_t));
2255    if(rc < 0) {
2256        ALOGE("%s:failed to map SETPARM buffer",__func__);
2257        rc = FAILED_TRANSACTION;
2258        mParamHeap->deallocate();
2259        delete mParamHeap;
2260        mParamHeap = NULL;
2261        return rc;
2262    }
2263
2264    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2265    return rc;
2266}
2267
2268/*===========================================================================
2269 * FUNCTION   : deinitParameters
2270 *
2271 * DESCRIPTION: de-initialize camera parameters
2272 *
2273 * PARAMETERS :
2274 *
2275 * RETURN     : NONE
2276 *==========================================================================*/
2277void QCamera3HardwareInterface::deinitParameters()
2278{
2279    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2280            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2281
2282    mParamHeap->deallocate();
2283    delete mParamHeap;
2284    mParamHeap = NULL;
2285
2286    mParameters = NULL;
2287}
2288
2289/*===========================================================================
2290 * FUNCTION   : calcMaxJpegSize
2291 *
2292 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2293 *
2294 * PARAMETERS :
2295 *
2296 * RETURN     : max_jpeg_size
2297 *==========================================================================*/
2298int QCamera3HardwareInterface::calcMaxJpegSize()
2299{
2300    int32_t max_jpeg_size = 0;
2301    int temp_width, temp_height;
2302    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2303        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2304        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2305        if (temp_width * temp_height > max_jpeg_size ) {
2306            max_jpeg_size = temp_width * temp_height;
2307        }
2308    }
2309    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2310    return max_jpeg_size;
2311}
2312
2313/*===========================================================================
2314 * FUNCTION   : initStaticMetadata
2315 *
2316 * DESCRIPTION: initialize the static metadata
2317 *
2318 * PARAMETERS :
2319 *   @cameraId  : camera Id
2320 *
2321 * RETURN     : int32_t type of status
2322 *              0  -- success
2323 *              non-zero failure code
2324 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    // Builds the immutable per-camera static metadata from the previously
    // queried gCamCapability[cameraId] and publishes it in
    // gStaticMetadata[cameraId]. Returns 0 (rc is never set to an error
    // in this function).
    int rc = 0;
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1); */

    /*hard coded for now but this should come from sensor*/
    float min_focus_distance;
    if(facingBack){
        min_focus_distance = 10;
    } else {
        min_focus_distance = 0;
    }
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &min_focus_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      &gCamCapability[cameraId]->focal_length,
                      1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                      gCamCapability[cameraId]->optical_stab_modes_count);

    staticInfo.update(ANDROID_LENS_POSITION,
                      gCamCapability[cameraId]->lens_position,
                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));

    /* Lens shading / geometric correction map geometry from the backend. */
    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
            geo_correction_map_size,
            sizeof(geo_correction_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
                       gCamCapability[cameraId]->geo_correction_map,
                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));

    /* android.sensor.info.* — physical sensor characteristics. */
    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);

    camera_metadata_rational baseGainFactor = {
            gCamCapability[cameraId]->base_gain_factor.numerator,
            gCamCapability[cameraId]->base_gain_factor.denominator};
    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
                      &baseGainFactor, 1);

    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                               gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, 2);

    /* Active array reported as (xmin, ymin, width, height); origin fixed
     * at (0, 0). */
    int32_t active_array_size[] = {0, 0,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    /* NOTE(review): max_num_roi is cast to int* — assumes its backend type
     * is int-sized; verify against cam_capability_t. */
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                                gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);


    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
                      &gCamCapability[cameraId]->raw_min_duration,
                       1);

    /* Output formats advertised to the framework: flexible YUV + JPEG blob. */
    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
                                                HAL_PIXEL_FORMAT_BLOB};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    /* Flatten the backend picture size table into (w, h) pairs. */
    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    /* Flatten the backend fps range table into (min, max) pairs. */
    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /*HAL 1 and HAL 3 common*/
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    int32_t max3aRegions = 1;
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            &max3aRegions, 1);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                                        gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    /* JPEG sizes reuse the processed size list built above. */
    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    /* Max JPEG blob size: largest picture area * 1.5 bytes/pixel plus the
     * blob trailer. NOTE(review): duplicates calcMaxJpegSize() — keep the
     * two in sync. */
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    /* Effects: translate each backend effect into the framework enum,
     * silently dropping those with no framework equivalent. */
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    int32_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP,
                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    /* Scene modes: keep a parallel list of backend indexes so the override
     * list below can be built only for the modes the framework knows. */
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    /* One (ae, awb, af) override triplet per supported scene mode. */
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                 gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        // NOTE(review): int8_t here (int elsewhere) truncates lookupFwkName's
        // int return; works only while framework AWB enums fit in 8 bits.
        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                    gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);


    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    /* AE modes from the backend, plus the three flash-assisted modes when a
     * flash unit exists. NOTE(review): the buffer holds 5 entries — assumes
     * supported_ae_modes_cnt <= 2 when flash is available; verify against
     * the backend's reported count. */
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    /* Max simultaneous output streams: {raw, processed, processed+stall}. */
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    /* Transfer ownership of the packed metadata to the global table. */
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}
2684
2685/*===========================================================================
2686 * FUNCTION   : makeTable
2687 *
2688 * DESCRIPTION: make a table of sizes
2689 *
2690 * PARAMETERS :
2691 *
2692 *
2693 *==========================================================================*/
2694void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2695                                          int32_t* sizeTable)
2696{
2697    int j = 0;
2698    for (int i = 0; i < size; i++) {
2699        sizeTable[j] = dimTable[i].width;
2700        sizeTable[j+1] = dimTable[i].height;
2701        j+=2;
2702    }
2703}
2704
2705/*===========================================================================
2706 * FUNCTION   : makeFPSTable
2707 *
2708 * DESCRIPTION: make a table of fps ranges
2709 *
2710 * PARAMETERS :
2711 *
2712 *==========================================================================*/
2713void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2714                                          int32_t* fpsRangesTable)
2715{
2716    int j = 0;
2717    for (int i = 0; i < size; i++) {
2718        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2719        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2720        j+=2;
2721    }
2722}
2723
2724/*===========================================================================
2725 * FUNCTION   : makeOverridesList
2726 *
2727 * DESCRIPTION: make a list of scene mode overrides
2728 *
2729 * PARAMETERS :
2730 *
2731 *
2732 *==========================================================================*/
2733void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2734                                                  uint8_t size, uint8_t* overridesList,
2735                                                  uint8_t* supported_indexes,
2736                                                  int camera_id)
2737{
2738    /*daemon will give a list of overrides for all scene modes.
2739      However we should send the fwk only the overrides for the scene modes
2740      supported by the framework*/
2741    int j = 0, index = 0, supt = 0;
2742    uint8_t focus_override;
2743    for (int i = 0; i < size; i++) {
2744        supt = 0;
2745        index = supported_indexes[i];
2746        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2747        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2748                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2749                                                    overridesTable[index].awb_mode);
2750        focus_override = (uint8_t)overridesTable[index].af_mode;
2751        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2752           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2753              supt = 1;
2754              break;
2755           }
2756        }
2757        if (supt) {
2758           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2759                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2760                                              focus_override);
2761        } else {
2762           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2763        }
2764        j+=3;
2765    }
2766}
2767
2768/*===========================================================================
2769 * FUNCTION   : getPreviewHalPixelFormat
2770 *
2771 * DESCRIPTION: convert the format to type recognized by framework
2772 *
2773 * PARAMETERS : format : the format from backend
2774 *
2775 ** RETURN    : format recognized by framework
2776 *
2777 *==========================================================================*/
2778int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2779{
2780    int32_t halPixelFormat;
2781
2782    switch (format) {
2783    case CAM_FORMAT_YUV_420_NV12:
2784        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2785        break;
2786    case CAM_FORMAT_YUV_420_NV21:
2787        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2788        break;
2789    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2790        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2791        break;
2792    case CAM_FORMAT_YUV_420_YV12:
2793        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2794        break;
2795    case CAM_FORMAT_YUV_422_NV16:
2796    case CAM_FORMAT_YUV_422_NV61:
2797    default:
2798        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2799        break;
2800    }
2801    return halPixelFormat;
2802}
2803
2804/*===========================================================================
2805 * FUNCTION   : getSensorSensitivity
2806 *
2807 * DESCRIPTION: convert iso_mode to an integer value
2808 *
2809 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2810 *
 * RETURN     : sensitivity supported by sensor
2812 *
2813 *==========================================================================*/
2814int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2815{
2816    int32_t sensitivity;
2817
2818    switch (iso_mode) {
2819    case CAM_ISO_MODE_100:
2820        sensitivity = 100;
2821        break;
2822    case CAM_ISO_MODE_200:
2823        sensitivity = 200;
2824        break;
2825    case CAM_ISO_MODE_400:
2826        sensitivity = 400;
2827        break;
2828    case CAM_ISO_MODE_800:
2829        sensitivity = 800;
2830        break;
2831    case CAM_ISO_MODE_1600:
2832        sensitivity = 1600;
2833        break;
2834    default:
2835        sensitivity = -1;
2836        break;
2837    }
2838    return sensitivity;
2839}
2840
2841
2842/*===========================================================================
2843 * FUNCTION   : AddSetParmEntryToBatch
2844 *
2845 * DESCRIPTION: add set parameter entry into batch
2846 *
2847 * PARAMETERS :
2848 *   @p_table     : ptr to parameter buffer
2849 *   @paramType   : parameter type
2850 *   @paramLength : length of parameter value
2851 *   @paramValue  : ptr to parameter value
2852 *
2853 * RETURN     : int32_t type of status
2854 *              NO_ERROR  -- success
 *              non-zero failure code
2856 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    // The batch buffer is a fixed table with one slot per parameter type;
    // populated slots are chained in ascending parameter-ID order (via the
    // GET/SET_*_PARAM_ID macros) so the receiver walks only set entries.
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
        // Slot is already the head of the chain; links are correct.
    } else if (position < current){
        // New entry precedes the current head: prepend it as the new head.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            // Splice the new slot in between current and its successor.
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Reject payloads larger than one table slot (parm_type_t is the
    // largest value a single entry can hold).
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
2898
2899/*===========================================================================
2900 * FUNCTION   : lookupFwkName
2901 *
2902 * DESCRIPTION: In case the enum is not same in fwk and backend
2903 *              make sure the parameter is correctly propogated
2904 *
2905 * PARAMETERS  :
2906 *   @arr      : map between the two enums
2907 *   @len      : len of the map
2908 *   @hal_name : name of the hal_parm to map
2909 *
2910 * RETURN     : int type of status
2911 *              fwk_name  -- success
2912 *              none-zero failure code
2913 *==========================================================================*/
2914int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2915                                             int len, int hal_name)
2916{
2917
2918    for (int i = 0; i < len; i++) {
2919        if (arr[i].hal_name == hal_name)
2920            return arr[i].fwk_name;
2921    }
2922
2923    /* Not able to find matching framework type is not necessarily
2924     * an error case. This happens when mm-camera supports more attributes
2925     * than the frameworks do */
2926    ALOGD("%s: Cannot find matching framework type", __func__);
2927    return NAME_NOT_FOUND;
2928}
2929
2930/*===========================================================================
2931 * FUNCTION   : lookupHalName
2932 *
2933 * DESCRIPTION: In case the enum is not same in fwk and backend
2934 *              make sure the parameter is correctly propogated
2935 *
2936 * PARAMETERS  :
2937 *   @arr      : map between the two enums
2938 *   @len      : len of the map
2939 *   @fwk_name : name of the hal_parm to map
2940 *
2941 * RETURN     : int32_t type of status
2942 *              hal_name  -- success
2943 *              none-zero failure code
2944 *==========================================================================*/
2945int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2946                                             int len, int fwk_name)
2947{
2948    for (int i = 0; i < len; i++) {
2949       if (arr[i].fwk_name == fwk_name)
2950           return arr[i].hal_name;
2951    }
2952    ALOGE("%s: Cannot find matching hal type", __func__);
2953    return NAME_NOT_FOUND;
2954}
2955
2956/*===========================================================================
2957 * FUNCTION   : getCapabilities
2958 *
2959 * DESCRIPTION: query camera capabilities
2960 *
2961 * PARAMETERS :
2962 *   @cameraId  : camera Id
2963 *   @info      : camera info struct to be filled in with camera capabilities
2964 *
2965 * RETURN     : int32_t type of status
2966 *              NO_ERROR  -- success
2967 *              none-zero failure code
2968 *==========================================================================*/
2969int QCamera3HardwareInterface::getCamInfo(int cameraId,
2970                                    struct camera_info *info)
2971{
2972    int rc = 0;
2973
2974    if (NULL == gCamCapability[cameraId]) {
2975        rc = initCapabilities(cameraId);
2976        if (rc < 0) {
2977            //pthread_mutex_unlock(&g_camlock);
2978            return rc;
2979        }
2980    }
2981
2982    if (NULL == gStaticMetadata[cameraId]) {
2983        rc = initStaticMetadata(cameraId);
2984        if (rc < 0) {
2985            return rc;
2986        }
2987    }
2988
2989    switch(gCamCapability[cameraId]->position) {
2990    case CAM_POSITION_BACK:
2991        info->facing = CAMERA_FACING_BACK;
2992        break;
2993
2994    case CAM_POSITION_FRONT:
2995        info->facing = CAMERA_FACING_FRONT;
2996        break;
2997
2998    default:
2999        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
3000        rc = -1;
3001        break;
3002    }
3003
3004
3005    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
3006    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
3007    info->static_camera_characteristics = gStaticMetadata[cameraId];
3008
3009    return rc;
3010}
3011
3012/*===========================================================================
3013 * FUNCTION   : translateMetadata
3014 *
3015 * DESCRIPTION: translate the metadata into camera_metadata_t
3016 *
3017 * PARAMETERS : type of the request
3018 *
3019 *
3020 * RETURN     : success: camera_metadata_t*
3021 *              failure: NULL
3022 *
3023 *==========================================================================*/
3024camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
3025{
3026    pthread_mutex_lock(&mMutex);
3027
3028    if (mDefaultMetadata[type] != NULL) {
3029        pthread_mutex_unlock(&mMutex);
3030        return mDefaultMetadata[type];
3031    }
3032    //first time we are handling this request
3033    //fill up the metadata structure using the wrapper class
3034    CameraMetadata settings;
3035    //translate from cam_capability_t to camera_metadata_tag_t
3036    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
3037    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
3038    int32_t defaultRequestID = 0;
3039    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
3040
3041    /*control*/
3042
3043    uint8_t controlIntent = 0;
3044    switch (type) {
3045      case CAMERA3_TEMPLATE_PREVIEW:
3046        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
3047        break;
3048      case CAMERA3_TEMPLATE_STILL_CAPTURE:
3049        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
3050        break;
3051      case CAMERA3_TEMPLATE_VIDEO_RECORD:
3052        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
3053        break;
3054      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
3055        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
3056        break;
3057      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
3058        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
3059        break;
3060      default:
3061        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
3062        break;
3063    }
3064    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
3065
3066    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3067            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
3068
3069    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
3070    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3071
3072    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
3073    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
3074
3075    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
3076    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
3077
3078    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
3079    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
3080
3081    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
3082    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
3083
3084    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
3085    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
3086
3087    static uint8_t focusMode;
3088    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
3089        ALOGE("%s: Setting focus mode to auto", __func__);
3090        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
3091    } else {
3092        ALOGE("%s: Setting focus mode to off", __func__);
3093        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3094    }
3095    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
3096
3097    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
3098    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
3099
3100    /*flash*/
3101    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3102    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3103
3104    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3105    settings.update(ANDROID_FLASH_FIRING_POWER,
3106            &flashFiringLevel, 1);
3107
3108    /* lens */
3109    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3110    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3111
3112    if (gCamCapability[mCameraId]->filter_densities_count) {
3113        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3114        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3115                        gCamCapability[mCameraId]->filter_densities_count);
3116    }
3117
3118    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3119    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3120
3121    /* Exposure time(Update the Min Exposure Time)*/
3122    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
3123    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
3124
3125    /* frame duration */
3126    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
3127    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
3128
3129    /* sensitivity */
3130    static const int32_t default_sensitivity = 100;
3131    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
3132
3133    /*edge mode*/
3134    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
3135    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
3136
3137    /*noise reduction mode*/
3138    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3139    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3140
3141    /*color correction mode*/
3142    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3143    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3144
3145    /*transform matrix mode*/
3146    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3147    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3148
3149    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3150    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3151
3152    int32_t scaler_crop_region[4];
3153    scaler_crop_region[0] = 0;
3154    scaler_crop_region[1] = 0;
3155    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
3156    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
3157    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
3158
3159    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
3160    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
3161
3162    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3163    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
3164
3165    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
3166                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
3167                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
3168    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
3169
3170    mDefaultMetadata[type] = settings.release();
3171
3172    pthread_mutex_unlock(&mMutex);
3173    return mDefaultMetadata[type];
3174}
3175
3176/*===========================================================================
3177 * FUNCTION   : setFrameParameters
3178 *
3179 * DESCRIPTION: set parameters per frame as requested in the metadata from
3180 *              framework
3181 *
3182 * PARAMETERS :
3183 *   @request   : request that needs to be serviced
3184 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3185 *
3186 * RETURN     : success: NO_ERROR
3187 *              failure:
3188 *==========================================================================*/
3189int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3190                    uint32_t streamTypeMask)
3191{
3192    /*translate from camera_metadata_t type to parm_type_t*/
3193    int rc = 0;
3194    if (request->settings == NULL && mFirstRequest) {
3195        /*settings cannot be null for the first request*/
3196        return BAD_VALUE;
3197    }
3198
3199    int32_t hal_version = CAM_HAL_V3;
3200
3201    memset(mParameters, 0, sizeof(parm_buffer_t));
3202    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3203    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3204                sizeof(hal_version), &hal_version);
3205    if (rc < 0) {
3206        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3207        return BAD_VALUE;
3208    }
3209
3210    /*we need to update the frame number in the parameters*/
3211    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3212                                sizeof(request->frame_number), &(request->frame_number));
3213    if (rc < 0) {
3214        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3215        return BAD_VALUE;
3216    }
3217
3218    /* Update stream id mask where buffers are requested */
3219    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3220                                sizeof(streamTypeMask), &streamTypeMask);
3221    if (rc < 0) {
3222        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3223        return BAD_VALUE;
3224    }
3225
3226    if(request->settings != NULL){
3227        rc = translateMetadataToParameters(request);
3228    }
3229    /*set the parameters to backend*/
3230    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3231    return rc;
3232}
3233
3234/*===========================================================================
3235 * FUNCTION   : translateMetadataToParameters
3236 *
3237 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3238 *
3239 *
3240 * PARAMETERS :
3241 *   @request  : request sent from framework
3242 *
3243 *
3244 * RETURN     : success: NO_ERROR
3245 *              failure:
3246 *==========================================================================*/
3247int QCamera3HardwareInterface::translateMetadataToParameters
3248                                  (const camera3_capture_request_t *request)
3249{
3250    int rc = 0;
3251    CameraMetadata frame_settings;
3252    frame_settings = request->settings;
3253
3254    /* Do not change the order of the following list unless you know what you are
3255     * doing.
3256     * The order is laid out in such a way that parameters in the front of the table
3257     * may be used to override the parameters later in the table. Examples are:
3258     * 1. META_MODE should precede AEC/AWB/AF MODE
3259     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3260     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3261     * 4. Any mode should precede it's corresponding settings
3262     */
3263    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3264        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3265        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3266                sizeof(metaMode), &metaMode);
3267        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3268           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3269           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3270                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3271                                             fwk_sceneMode);
3272           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3273                sizeof(sceneMode), &sceneMode);
3274        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3275           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
3276           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3277                sizeof(sceneMode), &sceneMode);
3278        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3279           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
3280           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3281                sizeof(sceneMode), &sceneMode);
3282        }
3283    }
3284
3285    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3286        uint8_t fwk_aeMode =
3287            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3288        uint8_t aeMode;
3289        int32_t redeye;
3290
3291        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3292            aeMode = CAM_AE_MODE_OFF;
3293        } else {
3294            aeMode = CAM_AE_MODE_ON;
3295        }
3296        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3297            redeye = 1;
3298        } else {
3299            redeye = 0;
3300        }
3301
3302        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3303                                          sizeof(AE_FLASH_MODE_MAP),
3304                                          fwk_aeMode);
3305        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3306                sizeof(aeMode), &aeMode);
3307        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3308                sizeof(flashMode), &flashMode);
3309        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3310                sizeof(redeye), &redeye);
3311    }
3312
3313    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3314        uint8_t fwk_whiteLevel =
3315            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3316        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3317                sizeof(WHITE_BALANCE_MODES_MAP),
3318                fwk_whiteLevel);
3319        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3320                sizeof(whiteLevel), &whiteLevel);
3321    }
3322
3323    float focalDistance = -1.0;
3324    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3325        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3326        rc = AddSetParmEntryToBatch(mParameters,
3327                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3328                sizeof(focalDistance), &focalDistance);
3329    }
3330
3331    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3332        uint8_t fwk_focusMode =
3333            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3334        uint8_t focusMode;
3335        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3336            focusMode = CAM_FOCUS_MODE_INFINITY;
3337        } else{
3338         focusMode = lookupHalName(FOCUS_MODES_MAP,
3339                                   sizeof(FOCUS_MODES_MAP),
3340                                   fwk_focusMode);
3341        }
3342        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3343                sizeof(focusMode), &focusMode);
3344    }
3345
3346    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3347        int32_t antibandingMode =
3348            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3349        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3350                sizeof(antibandingMode), &antibandingMode);
3351    }
3352
3353    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3354        int32_t expCompensation = frame_settings.find(
3355            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3356        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3357            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3358        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3359            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3360        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3361          sizeof(expCompensation), &expCompensation);
3362    }
3363
3364    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3365        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3366        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3367                sizeof(aeLock), &aeLock);
3368    }
3369    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3370        cam_fps_range_t fps_range;
3371        fps_range.min_fps =
3372            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3373        fps_range.max_fps =
3374            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3375        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3376                sizeof(fps_range), &fps_range);
3377    }
3378
3379    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3380        uint8_t awbLock =
3381            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3382        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3383                sizeof(awbLock), &awbLock);
3384    }
3385
3386    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3387        uint8_t fwk_effectMode =
3388            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3389        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3390                sizeof(EFFECT_MODES_MAP),
3391                fwk_effectMode);
3392        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3393                sizeof(effectMode), &effectMode);
3394    }
3395
3396    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3397        uint8_t colorCorrectMode =
3398            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3399        rc =
3400            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3401                    sizeof(colorCorrectMode), &colorCorrectMode);
3402    }
3403
3404    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3405        cam_color_correct_gains_t colorCorrectGains;
3406        for (int i = 0; i < 4; i++) {
3407            colorCorrectGains.gains[i] =
3408                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3409        }
3410        rc =
3411            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3412                    sizeof(colorCorrectGains), &colorCorrectGains);
3413    }
3414
3415    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3416        cam_color_correct_matrix_t colorCorrectTransform;
3417        cam_rational_type_t transform_elem;
3418        int num = 0;
3419        for (int i = 0; i < 3; i++) {
3420           for (int j = 0; j < 3; j++) {
3421              transform_elem.numerator =
3422                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3423              transform_elem.denominator =
3424                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3425              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3426              num++;
3427           }
3428        }
3429        rc =
3430            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3431                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3432    }
3433
3434    cam_trigger_t aecTrigger;
3435    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3436    aecTrigger.trigger_id = -1;
3437    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3438        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3439        aecTrigger.trigger =
3440            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3441        aecTrigger.trigger_id =
3442            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3443    }
3444    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3445                                sizeof(aecTrigger), &aecTrigger);
3446
3447    /*af_trigger must come with a trigger id*/
3448    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3449        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3450        cam_trigger_t af_trigger;
3451        af_trigger.trigger =
3452            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3453        af_trigger.trigger_id =
3454            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3455        rc = AddSetParmEntryToBatch(mParameters,
3456                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3457    }
3458
3459    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3460        int32_t demosaic =
3461            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3462        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3463                sizeof(demosaic), &demosaic);
3464    }
3465
3466    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3467        cam_edge_application_t edge_application;
3468        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3469        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3470            edge_application.sharpness = 0;
3471        } else {
3472            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3473                uint8_t edgeStrength =
3474                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
3475                edge_application.sharpness = (int32_t)edgeStrength;
3476            } else {
3477                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3478            }
3479        }
3480        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3481                sizeof(edge_application), &edge_application);
3482    }
3483
3484    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3485        int32_t respectFlashMode = 1;
3486        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3487            uint8_t fwk_aeMode =
3488                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3489            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3490                respectFlashMode = 0;
3491                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3492                    __func__);
3493            }
3494        }
3495        if (respectFlashMode) {
3496            uint8_t flashMode =
3497                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3498            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3499                                          sizeof(FLASH_MODES_MAP),
3500                                          flashMode);
3501            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3502            // To check: CAM_INTF_META_FLASH_MODE usage
3503            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3504                          sizeof(flashMode), &flashMode);
3505        }
3506    }
3507
3508    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3509        uint8_t flashPower =
3510            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3511        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3512                sizeof(flashPower), &flashPower);
3513    }
3514
3515    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3516        int64_t flashFiringTime =
3517            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3518        rc = AddSetParmEntryToBatch(mParameters,
3519                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3520    }
3521
3522    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3523        uint8_t geometricMode =
3524            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3525        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3526                sizeof(geometricMode), &geometricMode);
3527    }
3528
3529    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3530        uint8_t geometricStrength =
3531            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3532        rc = AddSetParmEntryToBatch(mParameters,
3533                CAM_INTF_META_GEOMETRIC_STRENGTH,
3534                sizeof(geometricStrength), &geometricStrength);
3535    }
3536
3537    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3538        uint8_t hotPixelMode =
3539            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3540        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3541                sizeof(hotPixelMode), &hotPixelMode);
3542    }
3543
3544    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3545        float lensAperture =
3546            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3547        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3548                sizeof(lensAperture), &lensAperture);
3549    }
3550
3551    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3552        float filterDensity =
3553            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3554        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3555                sizeof(filterDensity), &filterDensity);
3556    }
3557
3558    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3559        float focalLength =
3560            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3561        rc = AddSetParmEntryToBatch(mParameters,
3562                CAM_INTF_META_LENS_FOCAL_LENGTH,
3563                sizeof(focalLength), &focalLength);
3564    }
3565
3566    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3567        uint8_t optStabMode =
3568            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3569        rc = AddSetParmEntryToBatch(mParameters,
3570                CAM_INTF_META_LENS_OPT_STAB_MODE,
3571                sizeof(optStabMode), &optStabMode);
3572    }
3573
3574    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3575        uint8_t noiseRedMode =
3576            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3577        rc = AddSetParmEntryToBatch(mParameters,
3578                CAM_INTF_META_NOISE_REDUCTION_MODE,
3579                sizeof(noiseRedMode), &noiseRedMode);
3580    }
3581
3582    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3583        uint8_t noiseRedStrength =
3584            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3585        rc = AddSetParmEntryToBatch(mParameters,
3586                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3587                sizeof(noiseRedStrength), &noiseRedStrength);
3588    }
3589
3590    cam_crop_region_t scalerCropRegion;
3591    bool scalerCropSet = false;
3592    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3593        scalerCropRegion.left =
3594            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3595        scalerCropRegion.top =
3596            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3597        scalerCropRegion.width =
3598            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3599        scalerCropRegion.height =
3600            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3601        rc = AddSetParmEntryToBatch(mParameters,
3602                CAM_INTF_META_SCALER_CROP_REGION,
3603                sizeof(scalerCropRegion), &scalerCropRegion);
3604        scalerCropSet = true;
3605    }
3606
3607    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3608        int64_t sensorExpTime =
3609            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3610        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3611        rc = AddSetParmEntryToBatch(mParameters,
3612                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3613                sizeof(sensorExpTime), &sensorExpTime);
3614    }
3615
3616    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3617        int64_t sensorFrameDuration =
3618            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3619        int64_t minFrameDuration = getMinFrameDuration(request);
3620        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3621        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3622            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3623        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3624        rc = AddSetParmEntryToBatch(mParameters,
3625                CAM_INTF_META_SENSOR_FRAME_DURATION,
3626                sizeof(sensorFrameDuration), &sensorFrameDuration);
3627    }
3628
3629    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3630        int32_t sensorSensitivity =
3631            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3632        if (sensorSensitivity <
3633                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3634            sensorSensitivity =
3635                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3636        if (sensorSensitivity >
3637                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3638            sensorSensitivity =
3639                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3640        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3641        rc = AddSetParmEntryToBatch(mParameters,
3642                CAM_INTF_META_SENSOR_SENSITIVITY,
3643                sizeof(sensorSensitivity), &sensorSensitivity);
3644    }
3645
3646    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3647        int32_t shadingMode =
3648            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3649        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3650                sizeof(shadingMode), &shadingMode);
3651    }
3652
3653    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3654        uint8_t shadingStrength =
3655            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3656        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3657                sizeof(shadingStrength), &shadingStrength);
3658    }
3659
3660    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3661        uint8_t fwk_facedetectMode =
3662            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3663        uint8_t facedetectMode =
3664            lookupHalName(FACEDETECT_MODES_MAP,
3665                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3666        rc = AddSetParmEntryToBatch(mParameters,
3667                CAM_INTF_META_STATS_FACEDETECT_MODE,
3668                sizeof(facedetectMode), &facedetectMode);
3669    }
3670
3671    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3672        uint8_t histogramMode =
3673            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3674        rc = AddSetParmEntryToBatch(mParameters,
3675                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3676                sizeof(histogramMode), &histogramMode);
3677    }
3678
3679    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3680        uint8_t sharpnessMapMode =
3681            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3682        rc = AddSetParmEntryToBatch(mParameters,
3683                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3684                sizeof(sharpnessMapMode), &sharpnessMapMode);
3685    }
3686
3687    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3688        uint8_t tonemapMode =
3689            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3690        rc = AddSetParmEntryToBatch(mParameters,
3691                CAM_INTF_META_TONEMAP_MODE,
3692                sizeof(tonemapMode), &tonemapMode);
3693    }
3694    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3695    /*All tonemap channels will have the same number of points*/
3696    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3697        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3698        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3699        cam_rgb_tonemap_curves tonemapCurves;
3700        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3701
3702        /* ch0 = G*/
3703        int point = 0;
3704        cam_tonemap_curve_t tonemapCurveGreen;
3705        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3706            for (int j = 0; j < 2; j++) {
3707               tonemapCurveGreen.tonemap_points[i][j] =
3708                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3709               point++;
3710            }
3711        }
3712        tonemapCurves.curves[0] = tonemapCurveGreen;
3713
3714        /* ch 1 = B */
3715        point = 0;
3716        cam_tonemap_curve_t tonemapCurveBlue;
3717        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3718            for (int j = 0; j < 2; j++) {
3719               tonemapCurveBlue.tonemap_points[i][j] =
3720                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3721               point++;
3722            }
3723        }
3724        tonemapCurves.curves[1] = tonemapCurveBlue;
3725
3726        /* ch 2 = R */
3727        point = 0;
3728        cam_tonemap_curve_t tonemapCurveRed;
3729        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3730            for (int j = 0; j < 2; j++) {
3731               tonemapCurveRed.tonemap_points[i][j] =
3732                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3733               point++;
3734            }
3735        }
3736        tonemapCurves.curves[2] = tonemapCurveRed;
3737
3738        rc = AddSetParmEntryToBatch(mParameters,
3739                CAM_INTF_META_TONEMAP_CURVES,
3740                sizeof(tonemapCurves), &tonemapCurves);
3741    }
3742
3743    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3744        uint8_t captureIntent =
3745            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3746        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3747                sizeof(captureIntent), &captureIntent);
3748    }
3749
3750    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3751        uint8_t blackLevelLock =
3752            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3753        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3754                sizeof(blackLevelLock), &blackLevelLock);
3755    }
3756
3757    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3758        uint8_t lensShadingMapMode =
3759            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3760        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3761                sizeof(lensShadingMapMode), &lensShadingMapMode);
3762    }
3763
3764    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3765        cam_area_t roi;
3766        bool reset = true;
3767        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3768        if (scalerCropSet) {
3769            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3770        }
3771        if (reset) {
3772            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3773                    sizeof(roi), &roi);
3774        }
3775    }
3776
3777    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3778        cam_area_t roi;
3779        bool reset = true;
3780        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3781        if (scalerCropSet) {
3782            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3783        }
3784        if (reset) {
3785            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3786                    sizeof(roi), &roi);
3787        }
3788    }
3789
3790    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3791        cam_area_t roi;
3792        bool reset = true;
3793        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3794        if (scalerCropSet) {
3795            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3796        }
3797        if (reset) {
3798            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3799                    sizeof(roi), &roi);
3800        }
3801    }
3802    return rc;
3803}
3804
3805/*===========================================================================
3806 * FUNCTION   : getJpegSettings
3807 *
3808 * DESCRIPTION: save the jpeg settings in the HAL
3809 *
3810 *
3811 * PARAMETERS :
3812 *   @settings  : frame settings information from framework
3813 *
3814 *
3815 * RETURN     : success: NO_ERROR
3816 *              failure:
3817 *==========================================================================*/
3818int QCamera3HardwareInterface::getJpegSettings
3819                                  (const camera_metadata_t *settings)
3820{
3821    if (mJpegSettings) {
3822        if (mJpegSettings->gps_timestamp) {
3823            free(mJpegSettings->gps_timestamp);
3824            mJpegSettings->gps_timestamp = NULL;
3825        }
3826        if (mJpegSettings->gps_coordinates) {
3827            for (int i = 0; i < 3; i++) {
3828                free(mJpegSettings->gps_coordinates[i]);
3829                mJpegSettings->gps_coordinates[i] = NULL;
3830            }
3831        }
3832        free(mJpegSettings);
3833        mJpegSettings = NULL;
3834    }
3835    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3836    CameraMetadata jpeg_settings;
3837    jpeg_settings = settings;
3838
3839    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3840        mJpegSettings->jpeg_orientation =
3841            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3842    } else {
3843        mJpegSettings->jpeg_orientation = 0;
3844    }
3845    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3846        mJpegSettings->jpeg_quality =
3847            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3848    } else {
3849        mJpegSettings->jpeg_quality = 85;
3850    }
3851    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3852        mJpegSettings->thumbnail_size.width =
3853            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3854        mJpegSettings->thumbnail_size.height =
3855            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3856    } else {
3857        mJpegSettings->thumbnail_size.width = 0;
3858        mJpegSettings->thumbnail_size.height = 0;
3859    }
3860    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3861        for (int i = 0; i < 3; i++) {
3862            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3863            *(mJpegSettings->gps_coordinates[i]) =
3864                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3865        }
3866    } else{
3867       for (int i = 0; i < 3; i++) {
3868            mJpegSettings->gps_coordinates[i] = NULL;
3869        }
3870    }
3871
3872    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3873        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3874        *(mJpegSettings->gps_timestamp) =
3875            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3876    } else {
3877        mJpegSettings->gps_timestamp = NULL;
3878    }
3879
3880    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3881        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3882        for (int i = 0; i < len; i++) {
3883            mJpegSettings->gps_processing_method[i] =
3884                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3885        }
3886        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3887            mJpegSettings->gps_processing_method[len] = '\0';
3888        }
3889    } else {
3890        mJpegSettings->gps_processing_method[0] = '\0';
3891    }
3892
3893    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3894        mJpegSettings->sensor_sensitivity =
3895            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3896    } else {
3897        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3898    }
3899
3900    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3901
3902    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3903        mJpegSettings->lens_focal_length =
3904            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3905    }
3906    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3907        mJpegSettings->exposure_compensation =
3908            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3909    }
3910    mJpegSettings->sharpness = 10; //default value
3911    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3912        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3913        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3914            mJpegSettings->sharpness = 0;
3915        }
3916    }
3917    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3918    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3919    mJpegSettings->is_jpeg_format = true;
3920    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3921    return 0;
3922}
3923
3924/*===========================================================================
3925 * FUNCTION   : captureResultCb
3926 *
3927 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3928 *
3929 * PARAMETERS :
3930 *   @frame  : frame information from mm-camera-interface
3931 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3932 *   @userdata: userdata
3933 *
3934 * RETURN     : NONE
3935 *==========================================================================*/
3936void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3937                camera3_stream_buffer_t *buffer,
3938                uint32_t frame_number, void *userdata)
3939{
3940    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3941    if (hw == NULL) {
3942        ALOGE("%s: Invalid hw %p", __func__, hw);
3943        return;
3944    }
3945
3946    hw->captureResultCb(metadata, buffer, frame_number);
3947    return;
3948}
3949
3950
3951/*===========================================================================
3952 * FUNCTION   : initialize
3953 *
3954 * DESCRIPTION: Pass framework callback pointers to HAL
3955 *
3956 * PARAMETERS :
3957 *
3958 *
3959 * RETURN     : Success : 0
3960 *              Failure: -ENODEV
3961 *==========================================================================*/
3962
3963int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3964                                  const camera3_callback_ops_t *callback_ops)
3965{
3966    ALOGV("%s: E", __func__);
3967    QCamera3HardwareInterface *hw =
3968        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3969    if (!hw) {
3970        ALOGE("%s: NULL camera device", __func__);
3971        return -ENODEV;
3972    }
3973
3974    int rc = hw->initialize(callback_ops);
3975    ALOGV("%s: X", __func__);
3976    return rc;
3977}
3978
3979/*===========================================================================
3980 * FUNCTION   : configure_streams
3981 *
3982 * DESCRIPTION:
3983 *
3984 * PARAMETERS :
3985 *
3986 *
3987 * RETURN     : Success: 0
3988 *              Failure: -EINVAL (if stream configuration is invalid)
3989 *                       -ENODEV (fatal error)
3990 *==========================================================================*/
3991
3992int QCamera3HardwareInterface::configure_streams(
3993        const struct camera3_device *device,
3994        camera3_stream_configuration_t *stream_list)
3995{
3996    ALOGV("%s: E", __func__);
3997    QCamera3HardwareInterface *hw =
3998        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3999    if (!hw) {
4000        ALOGE("%s: NULL camera device", __func__);
4001        return -ENODEV;
4002    }
4003    int rc = hw->configureStreams(stream_list);
4004    ALOGV("%s: X", __func__);
4005    return rc;
4006}
4007
4008/*===========================================================================
4009 * FUNCTION   : register_stream_buffers
4010 *
4011 * DESCRIPTION: Register stream buffers with the device
4012 *
4013 * PARAMETERS :
4014 *
4015 * RETURN     :
4016 *==========================================================================*/
4017int QCamera3HardwareInterface::register_stream_buffers(
4018        const struct camera3_device *device,
4019        const camera3_stream_buffer_set_t *buffer_set)
4020{
4021    ALOGV("%s: E", __func__);
4022    QCamera3HardwareInterface *hw =
4023        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4024    if (!hw) {
4025        ALOGE("%s: NULL camera device", __func__);
4026        return -ENODEV;
4027    }
4028    int rc = hw->registerStreamBuffers(buffer_set);
4029    ALOGV("%s: X", __func__);
4030    return rc;
4031}
4032
4033/*===========================================================================
4034 * FUNCTION   : construct_default_request_settings
4035 *
4036 * DESCRIPTION: Configure a settings buffer to meet the required use case
4037 *
4038 * PARAMETERS :
4039 *
4040 *
4041 * RETURN     : Success: Return valid metadata
4042 *              Failure: Return NULL
4043 *==========================================================================*/
4044const camera_metadata_t* QCamera3HardwareInterface::
4045    construct_default_request_settings(const struct camera3_device *device,
4046                                        int type)
4047{
4048
4049    ALOGV("%s: E", __func__);
4050    camera_metadata_t* fwk_metadata = NULL;
4051    QCamera3HardwareInterface *hw =
4052        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4053    if (!hw) {
4054        ALOGE("%s: NULL camera device", __func__);
4055        return NULL;
4056    }
4057
4058    fwk_metadata = hw->translateCapabilityToMetadata(type);
4059
4060    ALOGV("%s: X", __func__);
4061    return fwk_metadata;
4062}
4063
4064/*===========================================================================
4065 * FUNCTION   : process_capture_request
4066 *
4067 * DESCRIPTION:
4068 *
4069 * PARAMETERS :
4070 *
4071 *
4072 * RETURN     :
4073 *==========================================================================*/
4074int QCamera3HardwareInterface::process_capture_request(
4075                    const struct camera3_device *device,
4076                    camera3_capture_request_t *request)
4077{
4078    ALOGV("%s: E", __func__);
4079    QCamera3HardwareInterface *hw =
4080        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4081    if (!hw) {
4082        ALOGE("%s: NULL camera device", __func__);
4083        return -EINVAL;
4084    }
4085
4086    int rc = hw->processCaptureRequest(request);
4087    ALOGV("%s: X", __func__);
4088    return rc;
4089}
4090
4091/*===========================================================================
4092 * FUNCTION   : get_metadata_vendor_tag_ops
4093 *
4094 * DESCRIPTION:
4095 *
4096 * PARAMETERS :
4097 *
4098 *
4099 * RETURN     :
4100 *==========================================================================*/
4101
4102void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
4103                const struct camera3_device *device,
4104                vendor_tag_query_ops_t* ops)
4105{
4106    ALOGV("%s: E", __func__);
4107    QCamera3HardwareInterface *hw =
4108        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4109    if (!hw) {
4110        ALOGE("%s: NULL camera device", __func__);
4111        return;
4112    }
4113
4114    hw->getMetadataVendorTagOps(ops);
4115    ALOGV("%s: X", __func__);
4116    return;
4117}
4118
4119/*===========================================================================
4120 * FUNCTION   : dump
4121 *
4122 * DESCRIPTION:
4123 *
4124 * PARAMETERS :
4125 *
4126 *
4127 * RETURN     :
4128 *==========================================================================*/
4129
4130void QCamera3HardwareInterface::dump(
4131                const struct camera3_device *device, int fd)
4132{
4133    ALOGV("%s: E", __func__);
4134    QCamera3HardwareInterface *hw =
4135        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4136    if (!hw) {
4137        ALOGE("%s: NULL camera device", __func__);
4138        return;
4139    }
4140
4141    hw->dump(fd);
4142    ALOGV("%s: X", __func__);
4143    return;
4144}
4145
4146/*===========================================================================
4147 * FUNCTION   : flush
4148 *
4149 * DESCRIPTION:
4150 *
4151 * PARAMETERS :
4152 *
4153 *
4154 * RETURN     :
4155 *==========================================================================*/
4156
4157int QCamera3HardwareInterface::flush(
4158                const struct camera3_device *device)
4159{
4160    int rc;
4161    ALOGV("%s: E", __func__);
4162    QCamera3HardwareInterface *hw =
4163        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4164    if (!hw) {
4165        ALOGE("%s: NULL camera device", __func__);
4166        return -EINVAL;
4167    }
4168
4169    rc = hw->flush();
4170    ALOGV("%s: X", __func__);
4171    return rc;
4172}
4173
4174/*===========================================================================
4175 * FUNCTION   : close_camera_device
4176 *
4177 * DESCRIPTION:
4178 *
4179 * PARAMETERS :
4180 *
4181 *
4182 * RETURN     :
4183 *==========================================================================*/
4184int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4185{
4186    ALOGV("%s: E", __func__);
4187    int ret = NO_ERROR;
4188    QCamera3HardwareInterface *hw =
4189        reinterpret_cast<QCamera3HardwareInterface *>(
4190            reinterpret_cast<camera3_device_t *>(device)->priv);
4191    if (!hw) {
4192        ALOGE("NULL camera device");
4193        return BAD_VALUE;
4194    }
4195    delete hw;
4196
4197    pthread_mutex_lock(&mCameraSessionLock);
4198    mCameraSessionActive = 0;
4199    pthread_mutex_unlock(&mCameraSessionLock);
4200    ALOGV("%s: X", __func__);
4201    return ret;
4202}
4203
4204/*===========================================================================
4205 * FUNCTION   : getWaveletDenoiseProcessPlate
4206 *
4207 * DESCRIPTION: query wavelet denoise process plate
4208 *
4209 * PARAMETERS : None
4210 *
4211 * RETURN     : WNR prcocess plate vlaue
4212 *==========================================================================*/
4213cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4214{
4215    char prop[PROPERTY_VALUE_MAX];
4216    memset(prop, 0, sizeof(prop));
4217    property_get("persist.denoise.process.plates", prop, "0");
4218    int processPlate = atoi(prop);
4219    switch(processPlate) {
4220    case 0:
4221        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4222    case 1:
4223        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4224    case 2:
4225        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4226    case 3:
4227        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4228    default:
4229        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4230    }
4231}
4232
4233/*===========================================================================
4234 * FUNCTION   : needRotationReprocess
4235 *
4236 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4237 *
4238 * PARAMETERS : none
4239 *
4240 * RETURN     : true: needed
4241 *              false: no need
4242 *==========================================================================*/
4243bool QCamera3HardwareInterface::needRotationReprocess()
4244{
4245
4246    if (!mJpegSettings->is_jpeg_format) {
4247        // RAW image, no need to reprocess
4248        return false;
4249    }
4250
4251    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4252        mJpegSettings->jpeg_orientation > 0) {
4253        // current rotation is not zero, and pp has the capability to process rotation
4254        ALOGD("%s: need do reprocess for rotation", __func__);
4255        return true;
4256    }
4257
4258    return false;
4259}
4260
4261/*===========================================================================
4262 * FUNCTION   : needReprocess
4263 *
4264 * DESCRIPTION: if reprocess in needed
4265 *
4266 * PARAMETERS : none
4267 *
4268 * RETURN     : true: needed
4269 *              false: no need
4270 *==========================================================================*/
4271bool QCamera3HardwareInterface::needReprocess()
4272{
4273    if (!mJpegSettings->is_jpeg_format) {
4274        // RAW image, no need to reprocess
4275        return false;
4276    }
4277
4278    if ((mJpegSettings->min_required_pp_mask > 0) ||
4279         isWNREnabled()) {
4280        // TODO: add for ZSL HDR later
4281        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4282        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4283        return true;
4284    }
4285    return needRotationReprocess();
4286}
4287
4288/*===========================================================================
4289 * FUNCTION   : addOnlineReprocChannel
4290 *
4291 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4292 *              coming from input channel
4293 *
4294 * PARAMETERS :
4295 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4296 *
4297 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4298 *==========================================================================*/
4299QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4300              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4301{
4302    int32_t rc = NO_ERROR;
4303    QCamera3ReprocessChannel *pChannel = NULL;
4304    if (pInputChannel == NULL) {
4305        ALOGE("%s: input channel obj is NULL", __func__);
4306        return NULL;
4307    }
4308
4309    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4310            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4311    if (NULL == pChannel) {
4312        ALOGE("%s: no mem for reprocess channel", __func__);
4313        return NULL;
4314    }
4315
4316    // Capture channel, only need snapshot and postview streams start together
4317    mm_camera_channel_attr_t attr;
4318    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4319    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4320    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4321    rc = pChannel->initialize();
4322    if (rc != NO_ERROR) {
4323        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4324        delete pChannel;
4325        return NULL;
4326    }
4327
4328    // pp feature config
4329    cam_pp_feature_config_t pp_config;
4330    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4331    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4332        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4333        pp_config.sharpness = mJpegSettings->sharpness;
4334    }
4335
4336    if (isWNREnabled()) {
4337        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4338        pp_config.denoise2d.denoise_enable = 1;
4339        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4340    }
4341    if (needRotationReprocess()) {
4342        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4343        int rotation = mJpegSettings->jpeg_orientation;
4344        if (rotation == 0) {
4345            pp_config.rotation = ROTATE_0;
4346        } else if (rotation == 90) {
4347            pp_config.rotation = ROTATE_90;
4348        } else if (rotation == 180) {
4349            pp_config.rotation = ROTATE_180;
4350        } else if (rotation == 270) {
4351            pp_config.rotation = ROTATE_270;
4352        }
4353    }
4354
4355   rc = pChannel->addReprocStreamsFromSource(pp_config,
4356                                             pInputChannel,
4357                                             mMetadataChannel);
4358
4359    if (rc != NO_ERROR) {
4360        delete pChannel;
4361        return NULL;
4362    }
4363    return pChannel;
4364}
4365
// Maximum number of unmatched frames the channel queue may hold; taken
// from the sensor capability's minimum post-processing buffer count.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4370
// Whether wavelet noise reduction is supported per the sensor capability.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4374
4375}; //end namespace qcamera
4376