QCamera3HWI.cpp revision cc7a1db9ae6ca4ffcaf16d478247489bf40d1cae
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#include <cutils/properties.h>
34#include <hardware/camera3.h>
35#include <camera/CameraMetadata.h>
36#include <stdlib.h>
37#include <utils/Log.h>
38#include <utils/Errors.h>
39#include <ui/Fence.h>
40#include <gralloc_priv.h>
41#include "QCamera3HWI.h"
42#include "QCamera3Mem.h"
43#include "QCamera3Channel.h"
44#include "QCamera3PostProc.h"
45
46using namespace android;
47
48namespace qcamera {
49
// Max of two values. Arguments are evaluated more than once — avoid
// operands with side effects.
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand for the INDEX-th buffer pointer of a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables; presumably populated by a capability query
// before the HAL object is constructed (the constructor dereferences its
// entry unconditionally) — confirm against the module init path.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
// Cached static camera metadata, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes session open/close; only one active camera session is
// permitted at a time (see openCamera(hw_device)).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
60
// Translation table: framework ANDROID_CONTROL_EFFECT_MODE_* values to
// the mm-camera backend CAM_EFFECT_MODE_* values.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
72
// Translation table: framework AWB modes to backend white-balance modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
84
// Translation table: framework scene modes to backend scene modes.
// FACE_PRIORITY maps to CAM_SCENE_MODE_OFF (presumably no dedicated
// backend scene for it; face handling is done separately — confirm),
// and STEADYPHOTO maps to the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
103
// Translation table: framework AF modes to backend focus modes.
// AF_MODE_OFF is mapped to the backend's FIXED focus mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
112
// Translation table: framework AE antibanding modes to backend values.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
119
// Translation table: framework AE modes to the backend *flash* mode they
// imply. Both AE_MODE_OFF and plain AE_MODE_ON disable flash; the REDEYE
// variant maps to plain AUTO flash (presumably no dedicated redeye flash
// mode in the backend — confirm).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
127
// Translation table: framework manual flash modes to backend flash modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
133
// Translation table: framework face-detect modes to backend values.
// Note: SIMPLE mode is not listed here, so lookups for it will not match.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
138
// Supported JPEG thumbnail sizes, flattened as (width, height) pairs.
// The trailing (0, 0) entry presumably means "no thumbnail" — confirm
// against how ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES is populated.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
141
// camera3 HAL entry points dispatched by the camera service. Uses GNU
// labeled-initializer syntax, so fields must appear in the exact order
// declared in camera3_device_ops_t.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
153
154
155/*===========================================================================
156 * FUNCTION   : QCamera3HardwareInterface
157 *
158 * DESCRIPTION: constructor of QCamera3HardwareInterface
159 *
160 * PARAMETERS :
161 *   @cameraId  : camera ID
162 *
163 * RETURN     : none
164 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Wire up the camera3_device_t the framework talks to; 'priv' lets the
    // static trampolines recover this instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced without a NULL
    // check — assumes capabilities were queried before construction; confirm.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // No default request templates built yet; created lazily on demand.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL is optional; used only for encode power hints in open/close.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
208
209/*===========================================================================
210 * FUNCTION   : ~QCamera3HardwareInterface
211 *
212 * DESCRIPTION: destructor of QCamera3HardwareInterface
213 *
214 * PARAMETERS : none
215 *
216 * RETURN     : none
217 *==========================================================================*/
218QCamera3HardwareInterface::~QCamera3HardwareInterface()
219{
220    ALOGV("%s: E", __func__);
221    /* We need to stop all streams before deleting any stream */
222        /*flush the metadata list*/
223    if (!mStoredMetadataList.empty()) {
224        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
225              m != mStoredMetadataList.end(); m++) {
226            mMetadataChannel->bufDone(m->meta_buf);
227            free(m->meta_buf);
228            m = mStoredMetadataList.erase(m);
229        }
230    }
231
232    // NOTE: 'camera3_stream_t *' objects are already freed at
233    //        this stage by the framework
234    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
235        it != mStreamInfo.end(); it++) {
236        QCamera3Channel *channel = (*it)->channel;
237        if (channel) {
238            channel->stop();
239        }
240    }
241
242    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
243        it != mStreamInfo.end(); it++) {
244        QCamera3Channel *channel = (*it)->channel;
245        if ((*it)->registered && (*it)->buffer_set.buffers) {
246             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
247        }
248        if (channel)
249            delete channel;
250        free (*it);
251    }
252
253    mPictureChannel = NULL;
254
255    if (mJpegSettings != NULL) {
256        free(mJpegSettings);
257        mJpegSettings = NULL;
258    }
259
260    /* Clean up all channels */
261    if (mCameraInitialized) {
262        if (mMetadataChannel) {
263            mMetadataChannel->stop();
264            delete mMetadataChannel;
265            mMetadataChannel = NULL;
266        }
267        deinitParameters();
268    }
269
270    if (mCameraOpened)
271        closeCamera();
272
273    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
274        if (mDefaultMetadata[i])
275            free_camera_metadata(mDefaultMetadata[i]);
276
277    pthread_cond_destroy(&mRequestCond);
278
279    pthread_mutex_destroy(&mMutex);
280    ALOGV("%s: X", __func__);
281}
282
283/*===========================================================================
284 * FUNCTION   : openCamera
285 *
286 * DESCRIPTION: open camera
287 *
288 * PARAMETERS :
289 *   @hw_device  : double ptr for camera device struct
290 *
291 * RETURN     : int32_t type of status
292 *              NO_ERROR  -- success
293 *              none-zero failure code
294 *==========================================================================*/
295int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
296{
297    int rc = 0;
298    pthread_mutex_lock(&mCameraSessionLock);
299    if (mCameraSessionActive) {
300        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
301        pthread_mutex_unlock(&mCameraSessionLock);
302        return -EUSERS;
303    }
304
305    if (mCameraOpened) {
306        *hw_device = NULL;
307        return PERMISSION_DENIED;
308    }
309
310    rc = openCamera();
311    if (rc == 0) {
312        *hw_device = &mCameraDevice.common;
313        mCameraSessionActive = 1;
314    } else
315        *hw_device = NULL;
316
317#ifdef HAS_MULTIMEDIA_HINTS
318    if (rc == 0) {
319        if (m_pPowerModule) {
320            if (m_pPowerModule->powerHint) {
321                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
322                        (void *)"state=1");
323            }
324        }
325    }
326#endif
327    pthread_mutex_unlock(&mCameraSessionLock);
328    return rc;
329}
330
331/*===========================================================================
332 * FUNCTION   : openCamera
333 *
334 * DESCRIPTION: open camera
335 *
336 * PARAMETERS : none
337 *
338 * RETURN     : int32_t type of status
339 *              NO_ERROR  -- success
340 *              none-zero failure code
341 *==========================================================================*/
342int QCamera3HardwareInterface::openCamera()
343{
344    if (mCameraHandle) {
345        ALOGE("Failure: Camera already opened");
346        return ALREADY_EXISTS;
347    }
348    mCameraHandle = camera_open(mCameraId);
349    if (!mCameraHandle) {
350        ALOGE("camera_open failed.");
351        return UNKNOWN_ERROR;
352    }
353
354    mCameraOpened = true;
355
356    return NO_ERROR;
357}
358
359/*===========================================================================
360 * FUNCTION   : closeCamera
361 *
362 * DESCRIPTION: close camera
363 *
364 * PARAMETERS : none
365 *
366 * RETURN     : int32_t type of status
367 *              NO_ERROR  -- success
368 *              none-zero failure code
369 *==========================================================================*/
370int QCamera3HardwareInterface::closeCamera()
371{
372    int rc = NO_ERROR;
373
374    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
375    mCameraHandle = NULL;
376    mCameraOpened = false;
377
378#ifdef HAS_MULTIMEDIA_HINTS
379    if (rc == NO_ERROR) {
380        if (m_pPowerModule) {
381            if (m_pPowerModule->powerHint) {
382                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
383                        (void *)"state=0");
384            }
385        }
386    }
387#endif
388
389    return rc;
390}
391
392/*===========================================================================
393 * FUNCTION   : initialize
394 *
395 * DESCRIPTION: Initialize frameworks callback functions
396 *
397 * PARAMETERS :
398 *   @callback_ops : callback function to frameworks
399 *
400 * RETURN     :
401 *
402 *==========================================================================*/
403int QCamera3HardwareInterface::initialize(
404        const struct camera3_callback_ops *callback_ops)
405{
406    int rc;
407
408    pthread_mutex_lock(&mMutex);
409
410    rc = initParameters();
411    if (rc < 0) {
412        ALOGE("%s: initParamters failed %d", __func__, rc);
413       goto err1;
414    }
415    mCallbackOps = callback_ops;
416
417    pthread_mutex_unlock(&mMutex);
418    mCameraInitialized = true;
419    return 0;
420
421err1:
422    pthread_mutex_unlock(&mMutex);
423    return rc;
424}
425
426/*===========================================================================
427 * FUNCTION   : configureStreams
428 *
429 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
430 *              and output streams.
431 *
432 * PARAMETERS :
433 *   @stream_list : streams to be configured
434 *
435 * RETURN     :
436 *
437 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    mIsZslMode = false;

    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        return BAD_VALUE;
    }
    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        return BAD_VALUE;
    }

    /* first invalidate all the steams in the mStreamList
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        mMetadataChannel->stop();
    }

    pthread_mutex_lock(&mMutex);

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    cam_stream_size_info_t stream_config_info;

    /* Pass 1 over the requested streams: mark streams we already know as
     * RECONFIGURE (their channel is deleted and rebuilt below), record
     * brand-new streams, and find the input/bidirectional and BLOB (JPEG)
     * streams. */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
                (*it)->channel = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            // NOTE(review): malloc return value is not checked; a failed
            // allocation would crash on the next line.
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            stream_info->channel = NULL;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    // NOTE(review): the buffers array is deleted here without checking
    // (*it)->registered, unlike the destructor — verify buffer_set.buffers
    // is always NULL/valid for never-registered streams.
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    //Create metadata channel and initialize it
    // NOTE(review): the NULL check below only fires if this is built with
    // a non-throwing operator new — confirm build flags.
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* Allocate channel objects for the requested streams */
    /* Pass 2: fill stream_config_info (sizes + backend stream type per
     * stream) and construct a channel for each stream that lacks one,
     * choosing gralloc usage flags by stream direction. */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        stream_config_info.stream_sizes[i].width = newStream->width;
        stream_config_info.stream_sizes[i].height = newStream->height;
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
            //for zsl stream the size is jpeg size
            stream_config_info.stream_sizes[i].width = jpegStream->width;
            stream_config_info.stream_sizes[i].height = jpegStream->height;
            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
        } else {
           //for non zsl streams find out the format
           switch (newStream->format) {
           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
              {
                 // Encoder-bound opaque streams become VIDEO, else PREVIEW.
                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
                 } else {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
                 }
              }
              break;
           case HAL_PIXEL_FORMAT_YCbCr_420_888:
              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
              break;
           case HAL_PIXEL_FORMAT_BLOB:
              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
              break;
           default:
              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
              break;
           }
        }
        if (newStream->priv == NULL) {
            //New stream, construct channel
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage =
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else
                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    // Bidirectional + a JPEG stream present => ZSL: the
                    // regular channel is sized to the JPEG dimensions.
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }

            /* Link the freshly created channel back into mStreamInfo. */
            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                    it != mStreamInfo.end(); it++) {
                if ((*it)->stream == newStream) {
                    (*it)->channel = (QCamera3Channel*) newStream->priv;
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }

    int32_t hal_version = CAM_HAL_V3;
    stream_config_info.num_streams = streamList->num_streams;

    // settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));

    /* Push the HAL version and the assembled stream table to the backend. */
    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);

    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
                sizeof(stream_config_info), &stream_config_info);

    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);

    /*For the streams to be reconfigured we need to register the buffers
      since the framework wont*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        /* Reset the pending-buffer count for every configured stream. */
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();

    mPendingFrameDropList.clear();

    /*flush the metadata list*/
    // NOTE(review): List::erase() returns the iterator following the erased
    // element and the for-loop's m++ then advances again, so every other
    // entry is skipped (and m can step past end()). Should drain with a
    // while loop instead.
    if (!mStoredMetadataList.empty()) {
        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
              m != mStoredMetadataList.end(); m++) {
            mMetadataChannel->bufDone(m->meta_buf);
            free(m->meta_buf);
            m = mStoredMetadataList.erase(m);
        }
    }

    mFirstRequest = true;

    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}
750
751/*===========================================================================
752 * FUNCTION   : validateCaptureRequest
753 *
754 * DESCRIPTION: validate a capture request from camera service
755 *
756 * PARAMETERS :
757 *   @request : request from framework to process
758 *
759 * RETURN     :
760 *
761 *==========================================================================*/
762int QCamera3HardwareInterface::validateCaptureRequest(
763                    camera3_capture_request_t *request)
764{
765    ssize_t idx = 0;
766    const camera3_stream_buffer_t *b;
767    CameraMetadata meta;
768
769    /* Sanity check the request */
770    if (request == NULL) {
771        ALOGE("%s: NULL capture request", __func__);
772        return BAD_VALUE;
773    }
774
775    uint32_t frameNumber = request->frame_number;
776    if (request->input_buffer != NULL &&
777            request->input_buffer->stream != mInputStream) {
778        ALOGE("%s: Request %d: Input buffer not from input stream!",
779                __FUNCTION__, frameNumber);
780        return BAD_VALUE;
781    }
782    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
783        ALOGE("%s: Request %d: No output buffers provided!",
784                __FUNCTION__, frameNumber);
785        return BAD_VALUE;
786    }
787    if (request->input_buffer != NULL) {
788        b = request->input_buffer;
789        QCamera3Channel *channel =
790            static_cast<QCamera3Channel*>(b->stream->priv);
791        if (channel == NULL) {
792            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
793                    __func__, frameNumber, idx);
794            return BAD_VALUE;
795        }
796        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
797            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
798                    __func__, frameNumber, idx);
799            return BAD_VALUE;
800        }
801        if (b->release_fence != -1) {
802            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
803                    __func__, frameNumber, idx);
804            return BAD_VALUE;
805        }
806        if (b->buffer == NULL) {
807            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
808                    __func__, frameNumber, idx);
809            return BAD_VALUE;
810        }
811    }
812
813    // Validate all buffers
814    b = request->output_buffers;
815    do {
816        QCamera3Channel *channel =
817                static_cast<QCamera3Channel*>(b->stream->priv);
818        if (channel == NULL) {
819            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
820                    __func__, frameNumber, idx);
821            return BAD_VALUE;
822        }
823        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
824            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
825                    __func__, frameNumber, idx);
826            return BAD_VALUE;
827        }
828        if (b->release_fence != -1) {
829            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
830                    __func__, frameNumber, idx);
831            return BAD_VALUE;
832        }
833        if (b->buffer == NULL) {
834            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
835                    __func__, frameNumber, idx);
836            return BAD_VALUE;
837        }
838        idx++;
839        b = request->output_buffers + idx;
840    } while (idx < (ssize_t)request->num_output_buffers);
841
842    return NO_ERROR;
843}
844
845/*===========================================================================
846 * FUNCTION   : deriveMinFrameDuration
847 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
849 *              on currently configured streams.
850 *
851 * PARAMETERS : NONE
852 *
853 * RETURN     : NONE
854 *
855 *==========================================================================*/
856void QCamera3HardwareInterface::deriveMinFrameDuration()
857{
858    int32_t maxJpegDimension, maxProcessedDimension;
859
860    maxJpegDimension = 0;
861    maxProcessedDimension = 0;
862
863    // Figure out maximum jpeg, processed, and raw dimensions
864    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
865        it != mStreamInfo.end(); it++) {
866
867        // Input stream doesn't have valid stream_type
868        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
869            continue;
870
871        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
872        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
873            if (dimension > maxJpegDimension)
874                maxJpegDimension = dimension;
875        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
876            if (dimension > maxProcessedDimension)
877                maxProcessedDimension = dimension;
878        }
879    }
880
881    //Assume all jpeg dimensions are in processed dimensions.
882    if (maxJpegDimension > maxProcessedDimension)
883        maxProcessedDimension = maxJpegDimension;
884
885    //Find minimum durations for processed, jpeg, and raw
886    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
887    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
888        if (maxProcessedDimension ==
889            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
890            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
891            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
892            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
893            break;
894        }
895    }
896}
897
898/*===========================================================================
899 * FUNCTION   : getMinFrameDuration
900 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
907 *
908 *==========================================================================*/
909int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
910{
911    bool hasJpegStream = false;
912    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
913        const camera3_stream_t *stream = request->output_buffers[i].stream;
914        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
915            hasJpegStream = true;
916    }
917
918    if (!hasJpegStream)
919        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
920    else
921        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
922}
923
924/*===========================================================================
925 * FUNCTION   : handleMetadataWithLock
926 *
927 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
928 *
929 * PARAMETERS : @metadata_buf: metadata buffer
930 *
931 * RETURN     :
932 *
933 *==========================================================================*/
934void QCamera3HardwareInterface::handleMetadataWithLock(
935    mm_camera_super_buf_t *metadata_buf)
936{
937    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
938    int32_t frame_number_valid = *(int32_t *)
939        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
940    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
941        CAM_INTF_META_PENDING_REQUESTS, metadata);
942    uint32_t frame_number = *(uint32_t *)
943        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
944    const struct timeval *tv = (const struct timeval *)
945        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
946    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
947        tv->tv_usec * NSEC_PER_USEC;
948    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
949        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
950
951    if (!frame_number_valid) {
952        ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
953        mMetadataChannel->bufDone(metadata_buf);
954        free(metadata_buf);
955        goto done_metadata;
956    }
957    ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
958            frame_number, capture_time);
959
960    // Go through the pending requests info and send shutter/results to frameworks
961    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
962        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
963        camera3_capture_result_t result;
964        camera3_notify_msg_t notify_msg;
965        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
966
967        // Flush out all entries with less or equal frame numbers.
968
969        //TODO: Make sure shutter timestamp really reflects shutter timestamp.
970        //Right now it's the same as metadata timestamp
971
972        //TODO: When there is metadata drop, how do we derive the timestamp of
973        //dropped frames? For now, we fake the dropped timestamp by substracting
974        //from the reported timestamp
975        nsecs_t current_capture_time = capture_time -
976            (frame_number - i->frame_number) * NSEC_PER_33MSEC;
977
978        // Send shutter notify to frameworks
979        notify_msg.type = CAMERA3_MSG_SHUTTER;
980        notify_msg.message.shutter.frame_number = i->frame_number;
981        notify_msg.message.shutter.timestamp = current_capture_time;
982        mCallbackOps->notify(mCallbackOps, &notify_msg);
983        ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
984                i->frame_number, capture_time);
985
986        // Check whether any stream buffer corresponding to this is dropped or not
987        // If dropped, then send the ERROR_BUFFER for the corresponding stream
988        if (cam_frame_drop.frame_dropped) {
989            camera3_notify_msg_t notify_msg;
990            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
991                    j != i->buffers.end(); j++) {
992                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
993                uint32_t streamTypeMask = channel->getStreamTypeMask();
994                if (streamTypeMask & cam_frame_drop.stream_type_mask) {
995                    // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
996                    ALOGV("%s: Start of reporting error frame#=%d, streamMask=%d",
997                           __func__, i->frame_number, streamTypeMask);
998                    notify_msg.type = CAMERA3_MSG_ERROR;
999                    notify_msg.message.error.frame_number = i->frame_number;
1000                    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
1001                    notify_msg.message.error.error_stream = j->stream;
1002                    mCallbackOps->notify(mCallbackOps, &notify_msg);
1003                    ALOGV("%s: End of reporting error frame#=%d, streamMask=%d",
1004                           __func__, i->frame_number, streamTypeMask);
1005                    PendingFrameDropInfo PendingFrameDrop;
1006                    PendingFrameDrop.frame_number=i->frame_number;
1007                    PendingFrameDrop.stream_type_mask = cam_frame_drop.stream_type_mask;
1008                    // Add the Frame drop info to mPendingFrameDropList
1009                    mPendingFrameDropList.push_back(PendingFrameDrop);
1010                }
1011            }
1012        }
1013
1014        // Send empty metadata with already filled buffers for dropped metadata
1015        // and send valid metadata with already filled buffers for current metadata
1016        if (i->frame_number < frame_number) {
1017            CameraMetadata dummyMetadata;
1018            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1019                    &current_capture_time, 1);
1020            dummyMetadata.update(ANDROID_REQUEST_ID,
1021                    &(i->request_id), 1);
1022            result.result = dummyMetadata.release();
1023        } else {
1024            result.result = translateCbMetadataToResultMetadata(metadata,
1025                    current_capture_time, i->request_id, i->blob_request,
1026                    &(i->input_jpeg_settings));
1027            if (mIsZslMode) {
1028                int found_metadata = 0;
1029                //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1030                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1031                    j != i->buffers.end(); j++) {
1032                    if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1033                        //check if corresp. zsl already exists in the stored metadata list
1034                        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1035                                m != mStoredMetadataList.begin(); m++) {
1036                            if (m->frame_number == frame_number) {
1037                                m->meta_buf = metadata_buf;
1038                                found_metadata = 1;
1039                                break;
1040                            }
1041                        }
1042                        if (!found_metadata) {
1043                            MetadataBufferInfo store_meta_info;
1044                            store_meta_info.meta_buf = metadata_buf;
1045                            store_meta_info.frame_number = frame_number;
1046                            mStoredMetadataList.push_back(store_meta_info);
1047                            found_metadata = 1;
1048                        }
1049                    }
1050                }
1051                if (!found_metadata) {
1052                    if (!i->input_buffer_present && i->blob_request) {
1053                        //livesnapshot or fallback non-zsl snapshot case
1054                        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1055                                j != i->buffers.end(); j++){
1056                            if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1057                                j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1058                                mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1059                                break;
1060                            }
1061                        }
1062                    } else {
1063                        //return the metadata immediately
1064                        mMetadataChannel->bufDone(metadata_buf);
1065                        free(metadata_buf);
1066                    }
1067                }
1068            } else if (!mIsZslMode && i->blob_request) {
1069                //If it is a blob request then send the metadata to the picture channel
1070                mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
1071            } else {
1072                // Return metadata buffer
1073                mMetadataChannel->bufDone(metadata_buf);
1074                free(metadata_buf);
1075            }
1076        }
1077        if (!result.result) {
1078            ALOGE("%s: metadata is NULL", __func__);
1079        }
1080        result.frame_number = i->frame_number;
1081        result.num_output_buffers = 0;
1082        result.output_buffers = NULL;
1083        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1084                    j != i->buffers.end(); j++) {
1085            if (j->buffer) {
1086                result.num_output_buffers++;
1087            }
1088        }
1089
1090        if (result.num_output_buffers > 0) {
1091            camera3_stream_buffer_t *result_buffers =
1092                new camera3_stream_buffer_t[result.num_output_buffers];
1093            if (!result_buffers) {
1094                ALOGE("%s: Fatal error: out of memory", __func__);
1095            }
1096            size_t result_buffers_idx = 0;
1097            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1098                    j != i->buffers.end(); j++) {
1099                if (j->buffer) {
1100                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1101                            m != mPendingFrameDropList.end(); m++) {
1102                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
1103                        uint32_t streamTypeMask = channel->getStreamTypeMask();
1104                        if((m->stream_type_mask & streamTypeMask) &&
1105                                (m->frame_number==frame_number)) {
1106                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1107                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
1108                                  __func__, frame_number, streamTypeMask);
1109                            m = mPendingFrameDropList.erase(m);
1110                            break;
1111                        }
1112                    }
1113                    result_buffers[result_buffers_idx++] = *(j->buffer);
1114                    free(j->buffer);
1115                    j->buffer = NULL;
1116                    mPendingBuffersMap.editValueFor(j->stream)--;
1117                }
1118            }
1119            result.output_buffers = result_buffers;
1120
1121            mCallbackOps->process_capture_result(mCallbackOps, &result);
1122            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1123                    __func__, result.frame_number, current_capture_time);
1124            free_camera_metadata((camera_metadata_t *)result.result);
1125            delete[] result_buffers;
1126        } else {
1127            mCallbackOps->process_capture_result(mCallbackOps, &result);
1128            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1129                        __func__, result.frame_number, current_capture_time);
1130            free_camera_metadata((camera_metadata_t *)result.result);
1131        }
1132        // erase the element from the list
1133        i = mPendingRequestsList.erase(i);
1134    }
1135
1136done_metadata:
1137    if (!pending_requests)
1138        unblockRequestIfNecessary();
1139
1140}
1141
1142/*===========================================================================
1143 * FUNCTION   : handleBufferWithLock
1144 *
1145 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1146 *
1147 * PARAMETERS : @buffer: image buffer for the callback
1148 *              @frame_number: frame number of the image buffer
1149 *
1150 * RETURN     :
1151 *
1152 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Metadata for this frame has already been sent.
        // Verify all pending requests frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        // Build a buffer-only result (no metadata) for immediate delivery.
        camera3_capture_result_t result;
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // If this stream's buffer was reported dropped for this frame, mark it
        // STATUS_ERROR and consume the matching frame-drop entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamTypeMask = channel->getStreamTypeMask();
            if((m->stream_type_mask & streamTypeMask) &&
                (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
                        __func__, frame_number, streamTypeMask);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        ALOGV("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer);
        // One fewer buffer outstanding on this stream.
        mPendingBuffersMap.editValueFor(buffer->stream)--;
        if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
            // ZSL stream: remember the buffer handle alongside the frame's
            // stored metadata so a later reprocess request can pair them.
            int found = 0;
            for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
                k != mStoredMetadataList.end(); k++) {
                if (k->frame_number == frame_number) {
                    k->zsl_buf_hdl = buffer->buffer;
                    found = 1;
                    break;
                }
            }
            if (!found) {
                // No stored metadata yet for this frame; create the entry now.
                MetadataBufferInfo meta_info;
                meta_info.frame_number = frame_number;
                meta_info.zsl_buf_hdl = buffer->buffer;
                mStoredMetadataList.push_back(meta_info);
            }
        }
        mCallbackOps->process_capture_result(mCallbackOps, &result);
        unblockRequestIfNecessary();
    } else {
        // Request is still pending: cache a copy of the buffer on its entry;
        // handleMetadataWithLock will deliver it with the result later.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
            if (j->stream == buffer->stream) {
                if (j->buffer != NULL) {
                    ALOGE("%s: Error: buffer is already set", __func__);
                } else {
                    j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                    *(j->buffer) = *buffer;
                    ALOGV("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                }
            }
        }
    }
}
1229
1230/*===========================================================================
1231 * FUNCTION   : unblockRequestIfNecessary
1232 *
1233 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1234 *              that mMutex is held when this function is called.
1235 *
1236 * PARAMETERS :
1237 *
1238 * RETURN     :
1239 *
1240 *==========================================================================*/
1241void QCamera3HardwareInterface::unblockRequestIfNecessary()
1242{
1243    bool max_buffers_dequeued = false;
1244    for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1245        const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1246        uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1247        if (queued_buffers == stream->max_buffers) {
1248            max_buffers_dequeued = true;
1249            break;
1250        }
1251    }
1252    if (!max_buffers_dequeued) {
1253        // Unblock process_capture_request
1254        mPendingRequest = 0;
1255        pthread_cond_signal(&mRequestCond);
1256    }
1257}
1258
1259/*===========================================================================
1260 * FUNCTION   : registerStreamBuffers
1261 *
1262 * DESCRIPTION: Register buffers for a given stream with the HAL device.
1263 *
1264 * PARAMETERS :
1265 *   @stream_list : streams to be configured
1266 *
1267 * RETURN     :
1268 *
1269 *==========================================================================*/
1270int QCamera3HardwareInterface::registerStreamBuffers(
1271        const camera3_stream_buffer_set_t *buffer_set)
1272{
1273    int rc = 0;
1274
1275    pthread_mutex_lock(&mMutex);
1276
1277    if (buffer_set == NULL) {
1278        ALOGE("%s: Invalid buffer_set parameter.", __func__);
1279        pthread_mutex_unlock(&mMutex);
1280        return -EINVAL;
1281    }
1282    if (buffer_set->stream == NULL) {
1283        ALOGE("%s: Invalid stream parameter.", __func__);
1284        pthread_mutex_unlock(&mMutex);
1285        return -EINVAL;
1286    }
1287    if (buffer_set->num_buffers < 1) {
1288        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
1289        pthread_mutex_unlock(&mMutex);
1290        return -EINVAL;
1291    }
1292    if (buffer_set->buffers == NULL) {
1293        ALOGE("%s: Invalid buffers parameter.", __func__);
1294        pthread_mutex_unlock(&mMutex);
1295        return -EINVAL;
1296    }
1297
1298    camera3_stream_t *stream = buffer_set->stream;
1299    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
1300
1301    //set the buffer_set in the mStreamInfo array
1302    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1303            it != mStreamInfo.end(); it++) {
1304        if ((*it)->stream == stream) {
1305            uint32_t numBuffers = buffer_set->num_buffers;
1306            (*it)->buffer_set.stream = buffer_set->stream;
1307            (*it)->buffer_set.num_buffers = numBuffers;
1308            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
1309            if ((*it)->buffer_set.buffers == NULL) {
1310                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
1311                pthread_mutex_unlock(&mMutex);
1312                return -ENOMEM;
1313            }
1314            for (size_t j = 0; j < numBuffers; j++){
1315                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
1316            }
1317            (*it)->registered = 1;
1318        }
1319    }
1320    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
1321    if (rc < 0) {
1322        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
1323        pthread_mutex_unlock(&mMutex);
1324        return -ENODEV;
1325    }
1326
1327    pthread_mutex_unlock(&mMutex);
1328    return NO_ERROR;
1329}
1330
1331/*===========================================================================
1332 * FUNCTION   : processCaptureRequest
1333 *
1334 * DESCRIPTION: process a capture request from camera service
1335 *
1336 * PARAMETERS :
1337 *   @request : request from framework to process
1338 *
1339 * RETURN     :
1340 *
1341 *==========================================================================*/
1342int QCamera3HardwareInterface::processCaptureRequest(
1343                    camera3_capture_request_t *request)
1344{
1345    int rc = NO_ERROR;
1346    int32_t request_id;
1347    CameraMetadata meta;
1348    MetadataBufferInfo reproc_meta;
1349    int queueMetadata = 0;
1350
1351    pthread_mutex_lock(&mMutex);
1352
1353    rc = validateCaptureRequest(request);
1354    if (rc != NO_ERROR) {
1355        ALOGE("%s: incoming request is not valid", __func__);
1356        pthread_mutex_unlock(&mMutex);
1357        return rc;
1358    }
1359
1360    meta = request->settings;
1361
1362    // For first capture request, send capture intent, and
1363    // stream on all streams
1364    if (mFirstRequest) {
1365
1366        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1367            int32_t hal_version = CAM_HAL_V3;
1368            uint8_t captureIntent =
1369                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1370
1371            memset(mParameters, 0, sizeof(parm_buffer_t));
1372            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1373            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1374                sizeof(hal_version), &hal_version);
1375            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1376                sizeof(captureIntent), &captureIntent);
1377            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1378                mParameters);
1379        }
1380
1381        mMetadataChannel->start();
1382        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1383            it != mStreamInfo.end(); it++) {
1384            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1385            channel->start();
1386        }
1387    }
1388
1389    uint32_t frameNumber = request->frame_number;
1390    uint32_t streamTypeMask = 0;
1391
1392    if (meta.exists(ANDROID_REQUEST_ID)) {
1393        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1394        mCurrentRequestId = request_id;
1395        ALOGV("%s: Received request with id: %d",__func__, request_id);
1396    } else if (mFirstRequest || mCurrentRequestId == -1){
1397        ALOGE("%s: Unable to find request id field, \
1398                & no previous id available", __func__);
1399        return NAME_NOT_FOUND;
1400    } else {
1401        ALOGV("%s: Re-using old request id", __func__);
1402        request_id = mCurrentRequestId;
1403    }
1404
1405    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1406                                    __func__, __LINE__,
1407                                    request->num_output_buffers,
1408                                    request->input_buffer,
1409                                    frameNumber);
1410    // Acquire all request buffers first
1411    int blob_request = 0;
1412    for (size_t i = 0; i < request->num_output_buffers; i++) {
1413        const camera3_stream_buffer_t& output = request->output_buffers[i];
1414        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1415        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1416
1417        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1418        //Call function to store local copy of jpeg data for encode params.
1419            blob_request = 1;
1420            rc = getJpegSettings(request->settings);
1421            if (rc < 0) {
1422                ALOGE("%s: failed to get jpeg parameters", __func__);
1423                pthread_mutex_unlock(&mMutex);
1424                return rc;
1425            }
1426        }
1427
1428        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1429        if (rc != OK) {
1430            ALOGE("%s: fence wait failed %d", __func__, rc);
1431            pthread_mutex_unlock(&mMutex);
1432            return rc;
1433        }
1434        streamTypeMask |= channel->getStreamTypeMask();
1435    }
1436
1437    rc = setFrameParameters(request, streamTypeMask);
1438    if (rc < 0) {
1439        ALOGE("%s: fail to set frame parameters", __func__);
1440        pthread_mutex_unlock(&mMutex);
1441        return rc;
1442    }
1443
1444    /* Update pending request list and pending buffers map */
1445    PendingRequestInfo pendingRequest;
1446    pendingRequest.frame_number = frameNumber;
1447    pendingRequest.num_buffers = request->num_output_buffers;
1448    pendingRequest.request_id = request_id;
1449    pendingRequest.blob_request = blob_request;
1450    if (blob_request)
1451        pendingRequest.input_jpeg_settings = *mJpegSettings;
1452    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1453
1454    for (size_t i = 0; i < request->num_output_buffers; i++) {
1455        RequestedBufferInfo requestedBuf;
1456        requestedBuf.stream = request->output_buffers[i].stream;
1457        requestedBuf.buffer = NULL;
1458        pendingRequest.buffers.push_back(requestedBuf);
1459
1460        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1461    }
1462    mPendingRequestsList.push_back(pendingRequest);
1463
1464    // Notify metadata channel we receive a request
1465    mMetadataChannel->request(NULL, frameNumber);
1466
1467    // Call request on other streams
1468    for (size_t i = 0; i < request->num_output_buffers; i++) {
1469        const camera3_stream_buffer_t& output = request->output_buffers[i];
1470        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1471        mm_camera_buf_def_t *pInputBuffer = NULL;
1472
1473        if (channel == NULL) {
1474            ALOGE("%s: invalid channel pointer for stream", __func__);
1475            continue;
1476        }
1477
1478        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1479            QCamera3RegularChannel* inputChannel = NULL;
1480            if(request->input_buffer != NULL){
1481                //Try to get the internal format
1482                inputChannel = (QCamera3RegularChannel*)
1483                    request->input_buffer->stream->priv;
1484                if(inputChannel == NULL ){
1485                    ALOGE("%s: failed to get input channel handle", __func__);
1486                } else {
1487                    pInputBuffer =
1488                        inputChannel->getInternalFormatBuffer(
1489                                request->input_buffer->buffer);
1490                    ALOGD("%s: Input buffer dump",__func__);
1491                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1492                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1493                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1494                    ALOGD("Handle:%p", request->input_buffer->buffer);
1495                    //TODO: need to get corresponding metadata and send it to pproc
1496                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1497                         m != mStoredMetadataList.end(); m++) {
1498                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1499                            reproc_meta.meta_buf = m->meta_buf;
1500                            queueMetadata = 1;
1501                            break;
1502                        }
1503                    }
1504                }
1505            }
1506            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1507                            pInputBuffer,(QCamera3Channel*)inputChannel);
1508            if (queueMetadata) {
1509                mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
1510            }
1511        } else {
1512            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1513                __LINE__, output.buffer, frameNumber);
1514            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1515                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1516                     m != mStoredMetadataList.end(); m++) {
1517                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1518                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1519                            mMetadataChannel->bufDone(m->meta_buf);
1520                            free(m->meta_buf);
1521                            m = mStoredMetadataList.erase(m);
1522                            break;
1523                        }
1524                   }
1525                }
1526            }
1527            rc = channel->request(output.buffer, frameNumber);
1528        }
1529        if (rc < 0)
1530            ALOGE("%s: request failed", __func__);
1531    }
1532
1533    mFirstRequest = false;
1534    // Added a timed condition wait
1535    struct timespec ts;
1536    uint8_t isValidTimeout = 1;
1537    rc = clock_gettime(CLOCK_REALTIME, &ts);
1538    if (rc < 0) {
1539        isValidTimeout = 0;
1540        ALOGE("%s: Error reading the real time clock!!", __func__);
1541    }
1542    else {
1543        // Make timeout as 5 sec for request to be honored
1544        ts.tv_sec += 5;
1545    }
1546    //Block on conditional variable
1547    mPendingRequest = 1;
1548    while (mPendingRequest == 1) {
1549        if (!isValidTimeout) {
1550            ALOGV("%s: Blocking on conditional wait", __func__);
1551            pthread_cond_wait(&mRequestCond, &mMutex);
1552        }
1553        else {
1554            ALOGV("%s: Blocking on timed conditional wait", __func__);
1555            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1556            if (rc == ETIMEDOUT) {
1557                rc = -ENODEV;
1558                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1559                break;
1560            }
1561        }
1562        ALOGV("%s: Unblocked", __func__);
1563    }
1564
1565    pthread_mutex_unlock(&mMutex);
1566
1567    return rc;
1568}
1569
1570/*===========================================================================
1571 * FUNCTION   : getMetadataVendorTagOps
1572 *
1573 * DESCRIPTION:
1574 *
1575 * PARAMETERS :
1576 *
1577 *
1578 * RETURN     :
1579 *==========================================================================*/
1580void QCamera3HardwareInterface::getMetadataVendorTagOps(
1581                    vendor_tag_query_ops_t* /*ops*/)
1582{
1583    /* Enable locks when we eventually add Vendor Tags */
1584    /*
1585    pthread_mutex_lock(&mMutex);
1586
1587    pthread_mutex_unlock(&mMutex);
1588    */
1589    return;
1590}
1591
1592/*===========================================================================
1593 * FUNCTION   : dump
1594 *
1595 * DESCRIPTION:
1596 *
1597 * PARAMETERS :
1598 *
1599 *
1600 * RETURN     :
1601 *==========================================================================*/
1602void QCamera3HardwareInterface::dump(int /*fd*/)
1603{
1604    /*Enable lock when we implement this function*/
1605    /*
1606    pthread_mutex_lock(&mMutex);
1607
1608    pthread_mutex_unlock(&mMutex);
1609    */
1610    return;
1611}
1612
1613/*===========================================================================
1614 * FUNCTION   : flush
1615 *
1616 * DESCRIPTION:
1617 *
1618 * PARAMETERS :
1619 *
1620 *
1621 * RETURN     :
1622 *==========================================================================*/
1623int QCamera3HardwareInterface::flush()
1624{
1625    /*Enable lock when we implement this function*/
1626    /*
1627    pthread_mutex_lock(&mMutex);
1628
1629    pthread_mutex_unlock(&mMutex);
1630    */
1631    return 0;
1632}
1633
1634/*===========================================================================
1635 * FUNCTION   : captureResultCb
1636 *
1637 * DESCRIPTION: Callback handler for all capture result
1638 *              (streams, as well as metadata)
1639 *
1640 * PARAMETERS :
1641 *   @metadata : metadata information
1642 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1643 *               NULL if metadata.
1644 *
1645 * RETURN     : NONE
1646 *==========================================================================*/
1647void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1648                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1649{
1650    pthread_mutex_lock(&mMutex);
1651
1652    if (metadata_buf)
1653        handleMetadataWithLock(metadata_buf);
1654    else
1655        handleBufferWithLock(buffer, frame_number);
1656
1657    pthread_mutex_unlock(&mMutex);
1658    return;
1659}
1660
1661/*===========================================================================
1662 * FUNCTION   : translateCbMetadataToResultMetadata
1663 *
1664 * DESCRIPTION:
1665 *
1666 * PARAMETERS :
1667 *   @metadata : metadata information from callback
1668 *
1669 * RETURN     : camera_metadata_t*
1670 *              metadata in a format specified by fwk
1671 *==========================================================================*/
1672camera_metadata_t*
1673QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1674                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1675                                 int32_t request_id, int32_t BlobRequest,
1676                                 jpeg_settings_t* inputjpegsettings)
1677{
1678    CameraMetadata camMetadata;
1679    camera_metadata_t* resultMetadata;
1680
1681    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1682    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1683
1684    // Update the JPEG related info
1685    if (BlobRequest) {
1686        camMetadata.update(ANDROID_JPEG_ORIENTATION, &(inputjpegsettings->jpeg_orientation), 1);
1687        camMetadata.update(ANDROID_JPEG_QUALITY, &(inputjpegsettings->jpeg_quality), 1);
1688
1689        int32_t thumbnailSizeTable[2];
1690        thumbnailSizeTable[0] = inputjpegsettings->thumbnail_size.width;
1691        thumbnailSizeTable[1] = inputjpegsettings->thumbnail_size.height;
1692        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSizeTable, 2);
1693        ALOGV("%s: Orien=%d, quality=%d wid=%d, height=%d", __func__, inputjpegsettings->jpeg_orientation,
1694               inputjpegsettings->jpeg_quality,thumbnailSizeTable[0], thumbnailSizeTable[1]);
1695
1696        if (inputjpegsettings->gps_coordinates[0]) {
1697            double gpsCoordinates[3];
1698            gpsCoordinates[0]=*(inputjpegsettings->gps_coordinates[0]);
1699            gpsCoordinates[1]=*(inputjpegsettings->gps_coordinates[1]);
1700            gpsCoordinates[2]=*(inputjpegsettings->gps_coordinates[2]);
1701            camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
1702            ALOGV("%s: gpsCoordinates[0]=%f, 1=%f 2=%f", __func__, gpsCoordinates[0],
1703                 gpsCoordinates[1],gpsCoordinates[2]);
1704        }
1705
1706        if (inputjpegsettings->gps_timestamp) {
1707            camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, inputjpegsettings->gps_timestamp, 1);
1708            ALOGV("%s: gps_timestamp=%lld", __func__, *(inputjpegsettings->gps_timestamp));
1709        }
1710
1711        String8 str(inputjpegsettings->gps_processing_method);
1712        if (strlen(mJpegSettings->gps_processing_method) > 0) {
1713            camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
1714        }
1715    }
1716    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1717    uint8_t next_entry;
1718    while (curr_entry != CAM_INTF_PARM_MAX) {
1719       switch (curr_entry) {
1720         case CAM_INTF_META_FACE_DETECTION:{
1721             cam_face_detection_data_t *faceDetectionInfo =
1722                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1723             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1724             int32_t faceIds[MAX_ROI];
1725             uint8_t faceScores[MAX_ROI];
1726             int32_t faceRectangles[MAX_ROI * 4];
1727             int32_t faceLandmarks[MAX_ROI * 6];
1728             int j = 0, k = 0;
1729             for (int i = 0; i < numFaces; i++) {
1730                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1731                 faceScores[i] = faceDetectionInfo->faces[i].score;
1732                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1733                         faceRectangles+j, -1);
1734                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1735                 j+= 4;
1736                 k+= 6;
1737             }
1738
1739             if (numFaces <= 0) {
1740                memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
1741                memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
1742                memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
1743                memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
1744             }
1745
1746             camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1747             camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1748             camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1749               faceRectangles, numFaces*4);
1750             camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1751               faceLandmarks, numFaces*6);
1752
1753            break;
1754            }
1755         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1756             uint8_t  *color_correct_mode =
1757                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1758             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1759             break;
1760          }
1761         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
1762             int32_t  *ae_precapture_id =
1763                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1764             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1765             break;
1766          }
1767         case CAM_INTF_META_AEC_ROI: {
1768            cam_area_t  *hAeRegions =
1769                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1770             int32_t aeRegions[5];
1771             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1772             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1773             break;
1774          }
1775          case CAM_INTF_META_AEC_STATE:{
1776             uint8_t *ae_state =
1777                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1778             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1779             break;
1780          }
1781          case CAM_INTF_PARM_FOCUS_MODE:{
1782             uint8_t  *focusMode =
1783                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1784             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
1785                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
1786             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
1787             break;
1788          }
1789          case CAM_INTF_META_AF_ROI:{
1790             /*af regions*/
1791             cam_area_t  *hAfRegions =
1792                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1793             int32_t afRegions[5];
1794             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1795             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1796             break;
1797          }
1798          case CAM_INTF_META_AF_STATE: {
1799             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1800             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1801             break;
1802          }
1803          case CAM_INTF_META_AF_TRIGGER_ID: {
1804             int32_t  *afTriggerId =
1805                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1806             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1807             break;
1808          }
1809          case CAM_INTF_PARM_WHITE_BALANCE: {
1810               uint8_t  *whiteBalance =
1811                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1812               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1813                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1814                   *whiteBalance);
1815               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
1816               break;
1817          }
1818          case CAM_INTF_META_AWB_REGIONS: {
1819             /*awb regions*/
1820             cam_area_t  *hAwbRegions =
1821                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1822             int32_t awbRegions[5];
1823             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1824             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1825             break;
1826          }
1827          case CAM_INTF_META_AWB_STATE: {
1828             uint8_t  *whiteBalanceState =
1829                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1830             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1831             break;
1832          }
1833          case CAM_INTF_META_MODE: {
1834             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1835             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1836             break;
1837          }
1838          case CAM_INTF_META_EDGE_MODE: {
1839             cam_edge_application_t  *edgeApplication =
1840                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1841             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
1842             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
1843             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
1844             break;
1845          }
1846          case CAM_INTF_META_FLASH_POWER: {
1847             uint8_t  *flashPower =
1848                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1849             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1850             break;
1851          }
1852          case CAM_INTF_META_FLASH_FIRING_TIME: {
1853             int64_t  *flashFiringTime =
1854                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1855             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1856             break;
1857          }
1858          case CAM_INTF_META_FLASH_STATE: {
1859             uint8_t  *flashState =
1860                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1861             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1862             break;
1863          }
1864          case CAM_INTF_META_FLASH_MODE:{
1865             uint8_t *flashMode = (uint8_t*)
1866                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
1867             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
1868             break;
1869          }
1870          case CAM_INTF_META_HOTPIXEL_MODE: {
1871              uint8_t  *hotPixelMode =
1872                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1873              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1874              break;
1875          }
1876          case CAM_INTF_META_LENS_APERTURE:{
1877             float  *lensAperture =
1878                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1879             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1880             break;
1881          }
1882          case CAM_INTF_META_LENS_FILTERDENSITY: {
1883             float  *filterDensity =
1884                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1885             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1886             break;
1887          }
1888          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
1889             float  *focalLength =
1890                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1891             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1892             break;
1893          }
1894          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
1895             float  *focusDistance =
1896                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1897             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1898             break;
1899          }
1900          case CAM_INTF_META_LENS_FOCUS_RANGE: {
1901             float  *focusRange =
1902                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1903             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
1904             break;
1905          }
1906          case CAM_INTF_META_LENS_STATE: {
1907             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
1908             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
1909             break;
1910          }
1911          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
1912             uint8_t  *opticalStab =
1913                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1914             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1915             break;
1916          }
1917          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
1918             uint8_t  *noiseRedMode =
1919                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1920             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1921             break;
1922          }
1923          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
1924             uint8_t  *noiseRedStrength =
1925                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
1926             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
1927             break;
1928          }
1929          case CAM_INTF_META_SCALER_CROP_REGION: {
1930             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1931             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1932             int32_t scalerCropRegion[4];
1933             scalerCropRegion[0] = hScalerCropRegion->left;
1934             scalerCropRegion[1] = hScalerCropRegion->top;
1935             scalerCropRegion[2] = hScalerCropRegion->width;
1936             scalerCropRegion[3] = hScalerCropRegion->height;
1937             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1938             break;
1939          }
1940          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
1941             int64_t  *sensorExpTime =
1942                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1943             mMetadataResponse.exposure_time = *sensorExpTime;
1944             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
1945             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1946             break;
1947          }
1948          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
1949             int64_t  *sensorFameDuration =
1950                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1951             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
1952             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
1953             break;
1954          }
1955          case CAM_INTF_META_SENSOR_SENSITIVITY:{
1956             int32_t  *sensorSensitivity =
1957                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1958             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
1959             mMetadataResponse.iso_speed = *sensorSensitivity;
1960             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1961             break;
1962          }
1963          case CAM_INTF_META_SHADING_MODE: {
1964             uint8_t  *shadingMode =
1965                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1966             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1967             break;
1968          }
1969          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
1970             uint8_t  *faceDetectMode =
1971                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1972             uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
1973                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
1974                                                        *faceDetectMode);
1975             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
1976             break;
1977          }
1978          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
1979             uint8_t  *histogramMode =
1980                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1981             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1982             break;
1983          }
1984          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
1985               uint8_t  *sharpnessMapMode =
1986                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1987               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1988                                  sharpnessMapMode, 1);
1989               break;
1990           }
1991          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
1992               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1993               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1994               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1995                                  (int32_t*)sharpnessMap->sharpness,
1996                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1997               break;
1998          }
1999          case CAM_INTF_META_LENS_SHADING_MAP: {
2000               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2001               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2002               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2003               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
2004               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2005                                  (float*)lensShadingMap->lens_shading,
2006                                  4*map_width*map_height);
2007               break;
2008          }
2009          case CAM_INTF_META_TONEMAP_CURVES:{
2010             //Populate CAM_INTF_META_TONEMAP_CURVES
2011             /* ch0 = G, ch 1 = B, ch 2 = R*/
2012             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2013             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
2014             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
2015                                (float*)tonemap->curves[0].tonemap_points,
2016                                tonemap->tonemap_points_cnt * 2);
2017
2018             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
2019                                (float*)tonemap->curves[1].tonemap_points,
2020                                tonemap->tonemap_points_cnt * 2);
2021
2022             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
2023                                (float*)tonemap->curves[2].tonemap_points,
2024                                tonemap->tonemap_points_cnt * 2);
2025             break;
2026          }
2027          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
2028             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
2029             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
2030             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
2031             break;
2032          }
2033          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
2034              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
2035              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
2036              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
2037                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
2038              break;
2039          }
2040          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
2041             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
2042             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
2043             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
2044                       predColorCorrectionGains->gains, 4);
2045             break;
2046          }
2047          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
2048             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
2049                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2050             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2051                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2052             break;
2053
2054          }
2055          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
2056             uint8_t *blackLevelLock = (uint8_t*)
2057               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2058             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2059             break;
2060          }
2061          case CAM_INTF_META_SCENE_FLICKER:{
2062             uint8_t *sceneFlicker = (uint8_t*)
2063             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2064             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2065             break;
2066          }
2067          case CAM_INTF_PARM_LED_MODE:
2068             break;
2069          case CAM_INTF_PARM_EFFECT: {
2070             uint8_t *effectMode = (uint8_t*)
2071                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2072             uint8_t fwk_effectMode = lookupFwkName(EFFECT_MODES_MAP,
2073                                                    sizeof(EFFECT_MODES_MAP),
2074                                                    *effectMode);
2075             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2076             break;
2077          }
2078          default:
2079             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2080                   __func__, curr_entry);
2081             break;
2082       }
2083       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2084       curr_entry = next_entry;
2085    }
2086    resultMetadata = camMetadata.release();
2087    return resultMetadata;
2088}
2089
2090/*===========================================================================
2091 * FUNCTION   : convertToRegions
2092 *
2093 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2094 *
2095 * PARAMETERS :
2096 *   @rect   : cam_rect_t struct to convert
2097 *   @region : int32_t destination array
2098 *   @weight : if we are converting from cam_area_t, weight is valid
2099 *             else weight = -1
2100 *
2101 *==========================================================================*/
2102void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2103    region[0] = rect.left;
2104    region[1] = rect.top;
2105    region[2] = rect.left + rect.width;
2106    region[3] = rect.top + rect.height;
2107    if (weight > -1) {
2108        region[4] = weight;
2109    }
2110}
2111
2112/*===========================================================================
2113 * FUNCTION   : convertFromRegions
2114 *
2115 * DESCRIPTION: helper method to convert from array to cam_rect_t
2116 *
2117 * PARAMETERS :
2118 *   @rect   : cam_rect_t struct to convert
2119 *   @region : int32_t destination array
2120 *   @weight : if we are converting from cam_area_t, weight is valid
2121 *             else weight = -1
2122 *
2123 *==========================================================================*/
2124void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2125                                                   const camera_metadata_t *settings,
2126                                                   uint32_t tag){
2127    CameraMetadata frame_settings;
2128    frame_settings = settings;
2129    int32_t x_min = frame_settings.find(tag).data.i32[0];
2130    int32_t y_min = frame_settings.find(tag).data.i32[1];
2131    int32_t x_max = frame_settings.find(tag).data.i32[2];
2132    int32_t y_max = frame_settings.find(tag).data.i32[3];
2133    roi->weight = frame_settings.find(tag).data.i32[4];
2134    roi->rect.left = x_min;
2135    roi->rect.top = y_min;
2136    roi->rect.width = x_max - x_min;
2137    roi->rect.height = y_max - y_min;
2138}
2139
2140/*===========================================================================
2141 * FUNCTION   : resetIfNeededROI
2142 *
2143 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2144 *              crop region
2145 *
2146 * PARAMETERS :
2147 *   @roi       : cam_area_t struct to resize
2148 *   @scalerCropRegion : cam_crop_region_t region to compare against
2149 *
2150 *
2151 *==========================================================================*/
2152bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2153                                                 const cam_crop_region_t* scalerCropRegion)
2154{
2155    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2156    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2157    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2158    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2159    if ((roi_x_max < scalerCropRegion->left) ||
2160        (roi_y_max < scalerCropRegion->top)  ||
2161        (roi->rect.left > crop_x_max) ||
2162        (roi->rect.top > crop_y_max)){
2163        return false;
2164    }
2165    if (roi->rect.left < scalerCropRegion->left) {
2166        roi->rect.left = scalerCropRegion->left;
2167    }
2168    if (roi->rect.top < scalerCropRegion->top) {
2169        roi->rect.top = scalerCropRegion->top;
2170    }
2171    if (roi_x_max > crop_x_max) {
2172        roi_x_max = crop_x_max;
2173    }
2174    if (roi_y_max > crop_y_max) {
2175        roi_y_max = crop_y_max;
2176    }
2177    roi->rect.width = roi_x_max - roi->rect.left;
2178    roi->rect.height = roi_y_max - roi->rect.top;
2179    return true;
2180}
2181
2182/*===========================================================================
2183 * FUNCTION   : convertLandmarks
2184 *
2185 * DESCRIPTION: helper method to extract the landmarks from face detection info
2186 *
2187 * PARAMETERS :
 *   @face      : cam_face_detection_info_t holding eye and mouth centers
 *   @landmarks : int32_t destination array of 6 coordinates
2190 *
2191 *
2192 *==========================================================================*/
2193void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2194{
2195    landmarks[0] = face.left_eye_center.x;
2196    landmarks[1] = face.left_eye_center.y;
2197    landmarks[2] = face.right_eye_center.x;
2198    landmarks[3] = face.right_eye_center.y;
2199    landmarks[4] = face.mouth_center.x;
2200    landmarks[5] = face.mouth_center.y;
2201}
2202
2203#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2204/*===========================================================================
2205 * FUNCTION   : initCapabilities
2206 *
2207 * DESCRIPTION: initialize camera capabilities in static data struct
2208 *
2209 * PARAMETERS :
2210 *   @cameraId  : camera Id
2211 *
2212 * RETURN     : int32_t type of status
2213 *              NO_ERROR  -- success
 *              non-zero failure code
2215 *==========================================================================*/
2216int QCamera3HardwareInterface::initCapabilities(int cameraId)
2217{
2218    int rc = 0;
2219    mm_camera_vtbl_t *cameraHandle = NULL;
2220    QCamera3HeapMemory *capabilityHeap = NULL;
2221
2222    cameraHandle = camera_open(cameraId);
2223    if (!cameraHandle) {
2224        ALOGE("%s: camera_open failed", __func__);
2225        rc = -1;
2226        goto open_failed;
2227    }
2228
2229    capabilityHeap = new QCamera3HeapMemory();
2230    if (capabilityHeap == NULL) {
2231        ALOGE("%s: creation of capabilityHeap failed", __func__);
2232        goto heap_creation_failed;
2233    }
2234    /* Allocate memory for capability buffer */
2235    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2236    if(rc != OK) {
2237        ALOGE("%s: No memory for cappability", __func__);
2238        goto allocate_failed;
2239    }
2240
2241    /* Map memory for capability buffer */
2242    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2243    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2244                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2245                                capabilityHeap->getFd(0),
2246                                sizeof(cam_capability_t));
2247    if(rc < 0) {
2248        ALOGE("%s: failed to map capability buffer", __func__);
2249        goto map_failed;
2250    }
2251
2252    /* Query Capability */
2253    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2254    if(rc < 0) {
2255        ALOGE("%s: failed to query capability",__func__);
2256        goto query_failed;
2257    }
2258    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2259    if (!gCamCapability[cameraId]) {
2260        ALOGE("%s: out of memory", __func__);
2261        goto query_failed;
2262    }
2263    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2264                                        sizeof(cam_capability_t));
2265    rc = 0;
2266
2267query_failed:
2268    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2269                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2270map_failed:
2271    capabilityHeap->deallocate();
2272allocate_failed:
2273    delete capabilityHeap;
2274heap_creation_failed:
2275    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2276    cameraHandle = NULL;
2277open_failed:
2278    return rc;
2279}
2280
2281/*===========================================================================
2282 * FUNCTION   : initParameters
2283 *
2284 * DESCRIPTION: initialize camera parameters
2285 *
2286 * PARAMETERS :
2287 *
2288 * RETURN     : int32_t type of status
2289 *              NO_ERROR  -- success
 *              non-zero failure code
2291 *==========================================================================*/
2292int QCamera3HardwareInterface::initParameters()
2293{
2294    int rc = 0;
2295
2296    //Allocate Set Param Buffer
2297    mParamHeap = new QCamera3HeapMemory();
2298    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
2299    if(rc != OK) {
2300        rc = NO_MEMORY;
2301        ALOGE("Failed to allocate SETPARM Heap memory");
2302        delete mParamHeap;
2303        mParamHeap = NULL;
2304        return rc;
2305    }
2306
2307    //Map memory for parameters buffer
2308    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2309            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2310            mParamHeap->getFd(0),
2311            sizeof(parm_buffer_t));
2312    if(rc < 0) {
2313        ALOGE("%s:failed to map SETPARM buffer",__func__);
2314        rc = FAILED_TRANSACTION;
2315        mParamHeap->deallocate();
2316        delete mParamHeap;
2317        mParamHeap = NULL;
2318        return rc;
2319    }
2320
2321    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
2322    return rc;
2323}
2324
2325/*===========================================================================
2326 * FUNCTION   : deinitParameters
2327 *
2328 * DESCRIPTION: de-initialize camera parameters
2329 *
2330 * PARAMETERS :
2331 *
2332 * RETURN     : NONE
2333 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Undo initParameters(): unmap the parameter buffer from the daemon
    // first, then release the backing heap. Order matters — the daemon
    // must drop its mapping before the memory is freed.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's mapping and is now dangling.
    mParameters = NULL;
}
2345
2346/*===========================================================================
2347 * FUNCTION   : calcMaxJpegSize
2348 *
2349 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2350 *
2351 * PARAMETERS :
2352 *
2353 * RETURN     : max_jpeg_size
2354 *==========================================================================*/
2355int QCamera3HardwareInterface::calcMaxJpegSize()
2356{
2357    int32_t max_jpeg_size = 0;
2358    int temp_width, temp_height;
2359    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2360        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2361        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2362        if (temp_width * temp_height > max_jpeg_size ) {
2363            max_jpeg_size = temp_width * temp_height;
2364        }
2365    }
2366    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2367    return max_jpeg_size;
2368}
2369
2370/*===========================================================================
2371 * FUNCTION   : initStaticMetadata
2372 *
2373 * DESCRIPTION: initialize the static metadata
2374 *
2375 * PARAMETERS :
2376 *   @cameraId  : camera Id
2377 *
2378 * RETURN     : int32_t type of status
2379 *              0  -- success
2380 *              non-zero failure code
2381 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    // Build the camera's static characteristics from the capability blob
    // queried in initCapabilities() and publish them in
    // gStaticMetadata[cameraId]. Ownership of the metadata buffer is
    // transferred out via staticInfo.release().
    int rc = 0;
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1); */

    /*hard coded for now but this should come from sensor*/
    float min_focus_distance;
    if(facingBack){
        min_focus_distance = 10;
    } else {
        min_focus_distance = 0;
    }
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &min_focus_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      &gCamCapability[cameraId]->focal_length,
                      1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                      gCamCapability[cameraId]->optical_stab_modes_count);

    staticInfo.update(ANDROID_LENS_POSITION,
                      gCamCapability[cameraId]->lens_position,
                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));

    // Lens shading / geometric correction map geometry comes straight
    // from the capability blob as (width, height) pairs.
    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
            geo_correction_map_size,
            sizeof(geo_correction_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
                       gCamCapability[cameraId]->geo_correction_map,
                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));

    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);

    camera_metadata_rational baseGainFactor = {
            gCamCapability[cameraId]->base_gain_factor.numerator,
            gCamCapability[cameraId]->base_gain_factor.denominator};
    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
                      &baseGainFactor, 1);

    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                               gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, 2);

    // Active array is reported as (xmin, ymin, width, height) anchored at
    // the origin.
    int32_t active_array_size[] = {0, 0,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                                gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);


    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
                      &gCamCapability[cameraId]->raw_min_duration,
                       1);

    // Scaler output formats and the size/duration tables derived from the
    // capability picture-size table.
    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
                                                HAL_PIXEL_FORMAT_BLOB};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /*HAL 1 and HAL 3 common*/
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    int32_t max3aRegions = 1;
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            &max3aRegions, 1);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                                        gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    // NOTE(review): this max-JPEG-size scan duplicates calcMaxJpegSize();
    // it cannot call that member here because this function is keyed by
    // cameraId rather than the instance's mCameraId.
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // The following loops translate backend enum lists into framework
    // enums via lookupFwkName, dropping entries with no framework match.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    int32_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP,
                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // supported_indexes remembers which backend slot each accepted scene
    // mode came from, so makeOverridesList can pull the matching override.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                 gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    // NOTE(review): val here is int8_t while NAME_NOT_FOUND is compared
    // against it — confirm lookupFwkName's failure value survives the
    // narrowing (the sibling loops above use plain int).
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                    gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);


    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // NOTE(review): avail_ae_modes holds only 5 slots but is filled with
    // supported_ae_modes_cnt entries plus 3 flash modes — this overflows
    // if the backend reports more than 2 AE modes while flash is
    // available. Verify supported_ae_modes_cnt's upper bound.
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // Max simultaneous output streams: {raw, processed, jpeg/stall}.
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    // Transfer ownership of the assembled metadata to the global table.
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}
2741
2742/*===========================================================================
2743 * FUNCTION   : makeTable
2744 *
2745 * DESCRIPTION: make a table of sizes
2746 *
2747 * PARAMETERS :
2748 *
2749 *
2750 *==========================================================================*/
2751void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2752                                          int32_t* sizeTable)
2753{
2754    int j = 0;
2755    for (int i = 0; i < size; i++) {
2756        sizeTable[j] = dimTable[i].width;
2757        sizeTable[j+1] = dimTable[i].height;
2758        j+=2;
2759    }
2760}
2761
2762/*===========================================================================
2763 * FUNCTION   : makeFPSTable
2764 *
2765 * DESCRIPTION: make a table of fps ranges
2766 *
2767 * PARAMETERS :
2768 *
2769 *==========================================================================*/
2770void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2771                                          int32_t* fpsRangesTable)
2772{
2773    int j = 0;
2774    for (int i = 0; i < size; i++) {
2775        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2776        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2777        j+=2;
2778    }
2779}
2780
2781/*===========================================================================
2782 * FUNCTION   : makeOverridesList
2783 *
2784 * DESCRIPTION: make a list of scene mode overrides
2785 *
2786 * PARAMETERS :
2787 *
2788 *
2789 *==========================================================================*/
2790void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2791                                                  uint8_t size, uint8_t* overridesList,
2792                                                  uint8_t* supported_indexes,
2793                                                  int camera_id)
2794{
2795    /*daemon will give a list of overrides for all scene modes.
2796      However we should send the fwk only the overrides for the scene modes
2797      supported by the framework*/
2798    int j = 0, index = 0, supt = 0;
2799    uint8_t focus_override;
2800    for (int i = 0; i < size; i++) {
2801        supt = 0;
2802        index = supported_indexes[i];
2803        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2804        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2805                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2806                                                    overridesTable[index].awb_mode);
2807        focus_override = (uint8_t)overridesTable[index].af_mode;
2808        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2809           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2810              supt = 1;
2811              break;
2812           }
2813        }
2814        if (supt) {
2815           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2816                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2817                                              focus_override);
2818        } else {
2819           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2820        }
2821        j+=3;
2822    }
2823}
2824
2825/*===========================================================================
 * FUNCTION   : getScalarFormat
2827 *
2828 * DESCRIPTION: convert the format to type recognized by framework
2829 *
2830 * PARAMETERS : format : the format from backend
2831 *
2832 ** RETURN    : format recognized by framework
2833 *
2834 *==========================================================================*/
2835int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2836{
2837    int32_t halPixelFormat;
2838
2839    switch (format) {
2840    case CAM_FORMAT_YUV_420_NV12:
2841        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2842        break;
2843    case CAM_FORMAT_YUV_420_NV21:
2844        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2845        break;
2846    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2847        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2848        break;
2849    case CAM_FORMAT_YUV_420_YV12:
2850        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2851        break;
2852    case CAM_FORMAT_YUV_422_NV16:
2853    case CAM_FORMAT_YUV_422_NV61:
2854    default:
2855        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2856        break;
2857    }
2858    return halPixelFormat;
2859}
2860
2861/*===========================================================================
2862 * FUNCTION   : getSensorSensitivity
2863 *
2864 * DESCRIPTION: convert iso_mode to an integer value
2865 *
2866 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2867 *
2868 ** RETURN    : sensitivity supported by sensor
2869 *
2870 *==========================================================================*/
2871int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2872{
2873    int32_t sensitivity;
2874
2875    switch (iso_mode) {
2876    case CAM_ISO_MODE_100:
2877        sensitivity = 100;
2878        break;
2879    case CAM_ISO_MODE_200:
2880        sensitivity = 200;
2881        break;
2882    case CAM_ISO_MODE_400:
2883        sensitivity = 400;
2884        break;
2885    case CAM_ISO_MODE_800:
2886        sensitivity = 800;
2887        break;
2888    case CAM_ISO_MODE_1600:
2889        sensitivity = 1600;
2890        break;
2891    default:
2892        sensitivity = -1;
2893        break;
2894    }
2895    return sensitivity;
2896}
2897
2898
2899/*===========================================================================
2900 * FUNCTION   : AddSetParmEntryToBatch
2901 *
2902 * DESCRIPTION: add set parameter entry into batch
2903 *
2904 * PARAMETERS :
2905 *   @p_table     : ptr to parameter buffer
2906 *   @paramType   : parameter type
2907 *   @paramLength : length of parameter value
2908 *   @paramValue  : ptr to parameter value
2909 *
2910 * RETURN     : int32_t type of status
2911 *              NO_ERROR  -- success
2912 *              none-zero failure code
2913 *==========================================================================*/
2914int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2915                                                          cam_intf_parm_type_t paramType,
2916                                                          uint32_t paramLength,
2917                                                          void *paramValue)
2918{
2919    int position = paramType;
2920    int current, next;
2921
2922    /*************************************************************************
2923    *                 Code to take care of linking next flags                *
2924    *************************************************************************/
2925    current = GET_FIRST_PARAM_ID(p_table);
2926    if (position == current){
2927        //DO NOTHING
2928    } else if (position < current){
2929        SET_NEXT_PARAM_ID(position, p_table, current);
2930        SET_FIRST_PARAM_ID(p_table, position);
2931    } else {
2932        /* Search for the position in the linked list where we need to slot in*/
2933        while (position > GET_NEXT_PARAM_ID(current, p_table))
2934            current = GET_NEXT_PARAM_ID(current, p_table);
2935
2936        /*If node already exists no need to alter linking*/
2937        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2938            next = GET_NEXT_PARAM_ID(current, p_table);
2939            SET_NEXT_PARAM_ID(current, p_table, position);
2940            SET_NEXT_PARAM_ID(position, p_table, next);
2941        }
2942    }
2943
2944    /*************************************************************************
2945    *                   Copy contents into entry                             *
2946    *************************************************************************/
2947
2948    if (paramLength > sizeof(parm_type_t)) {
2949        ALOGE("%s:Size of input larger than max entry size",__func__);
2950        return BAD_VALUE;
2951    }
2952    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2953    return NO_ERROR;
2954}
2955
2956/*===========================================================================
2957 * FUNCTION   : lookupFwkName
2958 *
2959 * DESCRIPTION: In case the enum is not same in fwk and backend
2960 *              make sure the parameter is correctly propogated
2961 *
2962 * PARAMETERS  :
2963 *   @arr      : map between the two enums
2964 *   @len      : len of the map
2965 *   @hal_name : name of the hal_parm to map
2966 *
2967 * RETURN     : int type of status
2968 *              fwk_name  -- success
2969 *              none-zero failure code
2970 *==========================================================================*/
2971int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2972                                             int len, int hal_name)
2973{
2974
2975    for (int i = 0; i < len; i++) {
2976        if (arr[i].hal_name == hal_name)
2977            return arr[i].fwk_name;
2978    }
2979
2980    /* Not able to find matching framework type is not necessarily
2981     * an error case. This happens when mm-camera supports more attributes
2982     * than the frameworks do */
2983    ALOGD("%s: Cannot find matching framework type", __func__);
2984    return NAME_NOT_FOUND;
2985}
2986
2987/*===========================================================================
2988 * FUNCTION   : lookupHalName
2989 *
2990 * DESCRIPTION: In case the enum is not same in fwk and backend
2991 *              make sure the parameter is correctly propogated
2992 *
2993 * PARAMETERS  :
2994 *   @arr      : map between the two enums
2995 *   @len      : len of the map
2996 *   @fwk_name : name of the hal_parm to map
2997 *
2998 * RETURN     : int32_t type of status
2999 *              hal_name  -- success
3000 *              none-zero failure code
3001 *==========================================================================*/
3002int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
3003                                             int len, int fwk_name)
3004{
3005    for (int i = 0; i < len; i++) {
3006       if (arr[i].fwk_name == fwk_name)
3007           return arr[i].hal_name;
3008    }
3009    ALOGE("%s: Cannot find matching hal type", __func__);
3010    return NAME_NOT_FOUND;
3011}
3012
3013/*===========================================================================
3014 * FUNCTION   : getCapabilities
3015 *
3016 * DESCRIPTION: query camera capabilities
3017 *
3018 * PARAMETERS :
3019 *   @cameraId  : camera Id
3020 *   @info      : camera info struct to be filled in with camera capabilities
3021 *
3022 * RETURN     : int32_t type of status
3023 *              NO_ERROR  -- success
3024 *              none-zero failure code
3025 *==========================================================================*/
3026int QCamera3HardwareInterface::getCamInfo(int cameraId,
3027                                    struct camera_info *info)
3028{
3029    int rc = 0;
3030
3031    if (NULL == gCamCapability[cameraId]) {
3032        rc = initCapabilities(cameraId);
3033        if (rc < 0) {
3034            //pthread_mutex_unlock(&g_camlock);
3035            return rc;
3036        }
3037    }
3038
3039    if (NULL == gStaticMetadata[cameraId]) {
3040        rc = initStaticMetadata(cameraId);
3041        if (rc < 0) {
3042            return rc;
3043        }
3044    }
3045
3046    switch(gCamCapability[cameraId]->position) {
3047    case CAM_POSITION_BACK:
3048        info->facing = CAMERA_FACING_BACK;
3049        break;
3050
3051    case CAM_POSITION_FRONT:
3052        info->facing = CAMERA_FACING_FRONT;
3053        break;
3054
3055    default:
3056        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
3057        rc = -1;
3058        break;
3059    }
3060
3061
3062    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
3063    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
3064    info->static_camera_characteristics = gStaticMetadata[cameraId];
3065
3066    return rc;
3067}
3068
3069/*===========================================================================
3070 * FUNCTION   : translateMetadata
3071 *
3072 * DESCRIPTION: translate the metadata into camera_metadata_t
3073 *
3074 * PARAMETERS : type of the request
3075 *
3076 *
3077 * RETURN     : success: camera_metadata_t*
3078 *              failure: NULL
3079 *
3080 *==========================================================================*/
3081camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
3082{
3083    pthread_mutex_lock(&mMutex);
3084
3085    if (mDefaultMetadata[type] != NULL) {
3086        pthread_mutex_unlock(&mMutex);
3087        return mDefaultMetadata[type];
3088    }
3089    //first time we are handling this request
3090    //fill up the metadata structure using the wrapper class
3091    CameraMetadata settings;
3092    //translate from cam_capability_t to camera_metadata_tag_t
3093    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
3094    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
3095    int32_t defaultRequestID = 0;
3096    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
3097
3098    /*control*/
3099
3100    uint8_t controlIntent = 0;
3101    switch (type) {
3102      case CAMERA3_TEMPLATE_PREVIEW:
3103        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
3104        break;
3105      case CAMERA3_TEMPLATE_STILL_CAPTURE:
3106        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
3107        break;
3108      case CAMERA3_TEMPLATE_VIDEO_RECORD:
3109        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
3110        break;
3111      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
3112        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
3113        break;
3114      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
3115        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
3116        break;
3117      default:
3118        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
3119        break;
3120    }
3121    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
3122
3123    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3124            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
3125
3126    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
3127    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3128
3129    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
3130    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
3131
3132    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
3133    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
3134
3135    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
3136    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
3137
3138    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
3139    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
3140
3141    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
3142    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
3143
3144    static uint8_t focusMode;
3145    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
3146        ALOGE("%s: Setting focus mode to auto", __func__);
3147        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
3148    } else {
3149        ALOGE("%s: Setting focus mode to off", __func__);
3150        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3151    }
3152    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
3153
3154    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
3155    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
3156
3157    /*flash*/
3158    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3159    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3160
3161    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3162    settings.update(ANDROID_FLASH_FIRING_POWER,
3163            &flashFiringLevel, 1);
3164
3165    /* lens */
3166    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3167    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3168
3169    if (gCamCapability[mCameraId]->filter_densities_count) {
3170        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3171        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3172                        gCamCapability[mCameraId]->filter_densities_count);
3173    }
3174
3175    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3176    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3177
3178    /* Exposure time(Update the Min Exposure Time)*/
3179    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
3180    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
3181
3182    /* frame duration */
3183    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
3184    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
3185
3186    /* sensitivity */
3187    static const int32_t default_sensitivity = 100;
3188    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
3189
3190    /*edge mode*/
3191    static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
3192    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
3193
3194    /*noise reduction mode*/
3195    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
3196    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
3197
3198    /*color correction mode*/
3199    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
3200    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
3201
3202    /*transform matrix mode*/
3203    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
3204    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
3205
3206    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
3207    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
3208
3209    int32_t scaler_crop_region[4];
3210    scaler_crop_region[0] = 0;
3211    scaler_crop_region[1] = 0;
3212    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
3213    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
3214    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
3215
3216    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
3217    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
3218
3219    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3220    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
3221
3222    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
3223                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
3224                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
3225    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
3226
3227    mDefaultMetadata[type] = settings.release();
3228
3229    pthread_mutex_unlock(&mMutex);
3230    return mDefaultMetadata[type];
3231}
3232
3233/*===========================================================================
3234 * FUNCTION   : setFrameParameters
3235 *
3236 * DESCRIPTION: set parameters per frame as requested in the metadata from
3237 *              framework
3238 *
3239 * PARAMETERS :
3240 *   @request   : request that needs to be serviced
3241 *   @streamTypeMask : bit mask of stream types on which buffers are requested
3242 *
3243 * RETURN     : success: NO_ERROR
3244 *              failure:
3245 *==========================================================================*/
3246int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
3247                    uint32_t streamTypeMask)
3248{
3249    /*translate from camera_metadata_t type to parm_type_t*/
3250    int rc = 0;
3251    if (request->settings == NULL && mFirstRequest) {
3252        /*settings cannot be null for the first request*/
3253        return BAD_VALUE;
3254    }
3255
3256    int32_t hal_version = CAM_HAL_V3;
3257
3258    memset(mParameters, 0, sizeof(parm_buffer_t));
3259    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
3260    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
3261                sizeof(hal_version), &hal_version);
3262    if (rc < 0) {
3263        ALOGE("%s: Failed to set hal version in the parameters", __func__);
3264        return BAD_VALUE;
3265    }
3266
3267    /*we need to update the frame number in the parameters*/
3268    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
3269                                sizeof(request->frame_number), &(request->frame_number));
3270    if (rc < 0) {
3271        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3272        return BAD_VALUE;
3273    }
3274
3275    /* Update stream id mask where buffers are requested */
3276    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
3277                                sizeof(streamTypeMask), &streamTypeMask);
3278    if (rc < 0) {
3279        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
3280        return BAD_VALUE;
3281    }
3282
3283    if(request->settings != NULL){
3284        rc = translateMetadataToParameters(request);
3285    }
3286    /*set the parameters to backend*/
3287    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
3288    return rc;
3289}
3290
3291/*===========================================================================
3292 * FUNCTION   : translateMetadataToParameters
3293 *
3294 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
3295 *
3296 *
3297 * PARAMETERS :
3298 *   @request  : request sent from framework
3299 *
3300 *
3301 * RETURN     : success: NO_ERROR
3302 *              failure:
3303 *==========================================================================*/
3304int QCamera3HardwareInterface::translateMetadataToParameters
3305                                  (const camera3_capture_request_t *request)
3306{
3307    int rc = 0;
3308    CameraMetadata frame_settings;
3309    frame_settings = request->settings;
3310
3311    /* Do not change the order of the following list unless you know what you are
3312     * doing.
3313     * The order is laid out in such a way that parameters in the front of the table
3314     * may be used to override the parameters later in the table. Examples are:
3315     * 1. META_MODE should precede AEC/AWB/AF MODE
3316     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
3317     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
3318     * 4. Any mode should precede it's corresponding settings
3319     */
3320    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3321        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3322        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3323                sizeof(metaMode), &metaMode);
3324        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3325           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3326           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3327                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3328                                             fwk_sceneMode);
3329           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3330                sizeof(sceneMode), &sceneMode);
3331        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3332           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
3333           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3334                sizeof(sceneMode), &sceneMode);
3335        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3336           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
3337           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3338                sizeof(sceneMode), &sceneMode);
3339        }
3340    }
3341
3342    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3343        uint8_t fwk_aeMode =
3344            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3345        uint8_t aeMode;
3346        int32_t redeye;
3347
3348        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
3349            aeMode = CAM_AE_MODE_OFF;
3350        } else {
3351            aeMode = CAM_AE_MODE_ON;
3352        }
3353        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
3354            redeye = 1;
3355        } else {
3356            redeye = 0;
3357        }
3358
3359        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
3360                                          sizeof(AE_FLASH_MODE_MAP),
3361                                          fwk_aeMode);
3362        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
3363                sizeof(aeMode), &aeMode);
3364        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3365                sizeof(flashMode), &flashMode);
3366        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
3367                sizeof(redeye), &redeye);
3368    }
3369
3370    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3371        uint8_t fwk_whiteLevel =
3372            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3373        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
3374                sizeof(WHITE_BALANCE_MODES_MAP),
3375                fwk_whiteLevel);
3376        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
3377                sizeof(whiteLevel), &whiteLevel);
3378    }
3379
3380    float focalDistance = -1.0;
3381    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
3382        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
3383        rc = AddSetParmEntryToBatch(mParameters,
3384                CAM_INTF_META_LENS_FOCUS_DISTANCE,
3385                sizeof(focalDistance), &focalDistance);
3386    }
3387
3388    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
3389        uint8_t fwk_focusMode =
3390            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
3391        uint8_t focusMode;
3392        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
3393            focusMode = CAM_FOCUS_MODE_INFINITY;
3394        } else{
3395         focusMode = lookupHalName(FOCUS_MODES_MAP,
3396                                   sizeof(FOCUS_MODES_MAP),
3397                                   fwk_focusMode);
3398        }
3399        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
3400                sizeof(focusMode), &focusMode);
3401    }
3402
3403    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
3404        int32_t antibandingMode =
3405            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
3406        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
3407                sizeof(antibandingMode), &antibandingMode);
3408    }
3409
3410    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3411        int32_t expCompensation = frame_settings.find(
3412            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3413        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
3414            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
3415        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
3416            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
3417        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
3418          sizeof(expCompensation), &expCompensation);
3419    }
3420
3421    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
3422        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
3423        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
3424                sizeof(aeLock), &aeLock);
3425    }
3426    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3427        cam_fps_range_t fps_range;
3428        fps_range.min_fps =
3429            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
3430        fps_range.max_fps =
3431            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3432        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
3433                sizeof(fps_range), &fps_range);
3434    }
3435
3436    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
3437        uint8_t awbLock =
3438            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
3439        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
3440                sizeof(awbLock), &awbLock);
3441    }
3442
3443    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
3444        uint8_t fwk_effectMode =
3445            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
3446        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
3447                sizeof(EFFECT_MODES_MAP),
3448                fwk_effectMode);
3449        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
3450                sizeof(effectMode), &effectMode);
3451    }
3452
3453    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3454        uint8_t colorCorrectMode =
3455            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3456        rc =
3457            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3458                    sizeof(colorCorrectMode), &colorCorrectMode);
3459    }
3460
3461    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3462        cam_color_correct_gains_t colorCorrectGains;
3463        for (int i = 0; i < 4; i++) {
3464            colorCorrectGains.gains[i] =
3465                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3466        }
3467        rc =
3468            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3469                    sizeof(colorCorrectGains), &colorCorrectGains);
3470    }
3471
3472    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3473        cam_color_correct_matrix_t colorCorrectTransform;
3474        cam_rational_type_t transform_elem;
3475        int num = 0;
3476        for (int i = 0; i < 3; i++) {
3477           for (int j = 0; j < 3; j++) {
3478              transform_elem.numerator =
3479                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3480              transform_elem.denominator =
3481                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3482              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3483              num++;
3484           }
3485        }
3486        rc =
3487            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3488                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3489    }
3490
3491    cam_trigger_t aecTrigger;
3492    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3493    aecTrigger.trigger_id = -1;
3494    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3495        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3496        aecTrigger.trigger =
3497            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3498        aecTrigger.trigger_id =
3499            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3500    }
3501    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3502                                sizeof(aecTrigger), &aecTrigger);
3503
3504    /*af_trigger must come with a trigger id*/
3505    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3506        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3507        cam_trigger_t af_trigger;
3508        af_trigger.trigger =
3509            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3510        af_trigger.trigger_id =
3511            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3512        rc = AddSetParmEntryToBatch(mParameters,
3513                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3514    }
3515
3516    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3517        int32_t demosaic =
3518            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3519        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3520                sizeof(demosaic), &demosaic);
3521    }
3522
3523    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3524        cam_edge_application_t edge_application;
3525        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3526        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
3527            edge_application.sharpness = 0;
3528        } else {
3529            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3530                uint8_t edgeStrength =
3531                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
3532                edge_application.sharpness = (int32_t)edgeStrength;
3533            } else {
3534                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
3535            }
3536        }
3537        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3538                sizeof(edge_application), &edge_application);
3539    }
3540
3541    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3542        int32_t respectFlashMode = 1;
3543        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3544            uint8_t fwk_aeMode =
3545                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3546            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3547                respectFlashMode = 0;
3548                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3549                    __func__);
3550            }
3551        }
3552        if (respectFlashMode) {
3553            uint8_t flashMode =
3554                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3555            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3556                                          sizeof(FLASH_MODES_MAP),
3557                                          flashMode);
3558            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3559            // To check: CAM_INTF_META_FLASH_MODE usage
3560            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3561                          sizeof(flashMode), &flashMode);
3562        }
3563    }
3564
3565    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3566        uint8_t flashPower =
3567            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3568        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3569                sizeof(flashPower), &flashPower);
3570    }
3571
3572    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3573        int64_t flashFiringTime =
3574            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3575        rc = AddSetParmEntryToBatch(mParameters,
3576                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3577    }
3578
3579    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3580        uint8_t geometricMode =
3581            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3582        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3583                sizeof(geometricMode), &geometricMode);
3584    }
3585
3586    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3587        uint8_t geometricStrength =
3588            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3589        rc = AddSetParmEntryToBatch(mParameters,
3590                CAM_INTF_META_GEOMETRIC_STRENGTH,
3591                sizeof(geometricStrength), &geometricStrength);
3592    }
3593
3594    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3595        uint8_t hotPixelMode =
3596            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3597        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3598                sizeof(hotPixelMode), &hotPixelMode);
3599    }
3600
3601    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3602        float lensAperture =
3603            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3604        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3605                sizeof(lensAperture), &lensAperture);
3606    }
3607
3608    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3609        float filterDensity =
3610            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3611        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3612                sizeof(filterDensity), &filterDensity);
3613    }
3614
3615    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3616        float focalLength =
3617            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3618        rc = AddSetParmEntryToBatch(mParameters,
3619                CAM_INTF_META_LENS_FOCAL_LENGTH,
3620                sizeof(focalLength), &focalLength);
3621    }
3622
3623    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3624        uint8_t optStabMode =
3625            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3626        rc = AddSetParmEntryToBatch(mParameters,
3627                CAM_INTF_META_LENS_OPT_STAB_MODE,
3628                sizeof(optStabMode), &optStabMode);
3629    }
3630
3631    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3632        uint8_t noiseRedMode =
3633            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3634        rc = AddSetParmEntryToBatch(mParameters,
3635                CAM_INTF_META_NOISE_REDUCTION_MODE,
3636                sizeof(noiseRedMode), &noiseRedMode);
3637    }
3638
3639    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3640        uint8_t noiseRedStrength =
3641            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3642        rc = AddSetParmEntryToBatch(mParameters,
3643                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3644                sizeof(noiseRedStrength), &noiseRedStrength);
3645    }
3646
3647    cam_crop_region_t scalerCropRegion;
3648    bool scalerCropSet = false;
3649    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3650        scalerCropRegion.left =
3651            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3652        scalerCropRegion.top =
3653            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3654        scalerCropRegion.width =
3655            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3656        scalerCropRegion.height =
3657            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3658        rc = AddSetParmEntryToBatch(mParameters,
3659                CAM_INTF_META_SCALER_CROP_REGION,
3660                sizeof(scalerCropRegion), &scalerCropRegion);
3661        scalerCropSet = true;
3662    }
3663
3664    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3665        int64_t sensorExpTime =
3666            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3667        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3668        rc = AddSetParmEntryToBatch(mParameters,
3669                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3670                sizeof(sensorExpTime), &sensorExpTime);
3671    }
3672
3673    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3674        int64_t sensorFrameDuration =
3675            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3676        int64_t minFrameDuration = getMinFrameDuration(request);
3677        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3678        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3679            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3680        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3681        rc = AddSetParmEntryToBatch(mParameters,
3682                CAM_INTF_META_SENSOR_FRAME_DURATION,
3683                sizeof(sensorFrameDuration), &sensorFrameDuration);
3684    }
3685
3686    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3687        int32_t sensorSensitivity =
3688            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3689        if (sensorSensitivity <
3690                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3691            sensorSensitivity =
3692                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3693        if (sensorSensitivity >
3694                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3695            sensorSensitivity =
3696                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3697        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3698        rc = AddSetParmEntryToBatch(mParameters,
3699                CAM_INTF_META_SENSOR_SENSITIVITY,
3700                sizeof(sensorSensitivity), &sensorSensitivity);
3701    }
3702
3703    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3704        int32_t shadingMode =
3705            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3706        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3707                sizeof(shadingMode), &shadingMode);
3708    }
3709
3710    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3711        uint8_t shadingStrength =
3712            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3713        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3714                sizeof(shadingStrength), &shadingStrength);
3715    }
3716
3717    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3718        uint8_t fwk_facedetectMode =
3719            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3720        uint8_t facedetectMode =
3721            lookupHalName(FACEDETECT_MODES_MAP,
3722                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3723        rc = AddSetParmEntryToBatch(mParameters,
3724                CAM_INTF_META_STATS_FACEDETECT_MODE,
3725                sizeof(facedetectMode), &facedetectMode);
3726    }
3727
3728    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3729        uint8_t histogramMode =
3730            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3731        rc = AddSetParmEntryToBatch(mParameters,
3732                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3733                sizeof(histogramMode), &histogramMode);
3734    }
3735
3736    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3737        uint8_t sharpnessMapMode =
3738            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3739        rc = AddSetParmEntryToBatch(mParameters,
3740                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3741                sizeof(sharpnessMapMode), &sharpnessMapMode);
3742    }
3743
3744    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3745        uint8_t tonemapMode =
3746            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3747        rc = AddSetParmEntryToBatch(mParameters,
3748                CAM_INTF_META_TONEMAP_MODE,
3749                sizeof(tonemapMode), &tonemapMode);
3750    }
3751    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
3752    /*All tonemap channels will have the same number of points*/
3753    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
3754        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
3755        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3756        cam_rgb_tonemap_curves tonemapCurves;
3757        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
3758
3759        /* ch0 = G*/
3760        int point = 0;
3761        cam_tonemap_curve_t tonemapCurveGreen;
3762        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
3763            for (int j = 0; j < 2; j++) {
3764               tonemapCurveGreen.tonemap_points[i][j] =
3765                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3766               point++;
3767            }
3768        }
3769        tonemapCurves.curves[0] = tonemapCurveGreen;
3770
3771        /* ch 1 = B */
3772        point = 0;
3773        cam_tonemap_curve_t tonemapCurveBlue;
3774        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3775            for (int j = 0; j < 2; j++) {
3776               tonemapCurveBlue.tonemap_points[i][j] =
3777                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3778               point++;
3779            }
3780        }
3781        tonemapCurves.curves[1] = tonemapCurveBlue;
3782
3783        /* ch 2 = R */
3784        point = 0;
3785        cam_tonemap_curve_t tonemapCurveRed;
3786        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
3787            for (int j = 0; j < 2; j++) {
3788               tonemapCurveRed.tonemap_points[i][j] =
3789                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3790               point++;
3791            }
3792        }
3793        tonemapCurves.curves[2] = tonemapCurveRed;
3794
3795        rc = AddSetParmEntryToBatch(mParameters,
3796                CAM_INTF_META_TONEMAP_CURVES,
3797                sizeof(tonemapCurves), &tonemapCurves);
3798    }
3799
3800    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3801        uint8_t captureIntent =
3802            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3803        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3804                sizeof(captureIntent), &captureIntent);
3805    }
3806
3807    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3808        uint8_t blackLevelLock =
3809            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3810        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3811                sizeof(blackLevelLock), &blackLevelLock);
3812    }
3813
3814    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3815        uint8_t lensShadingMapMode =
3816            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3817        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3818                sizeof(lensShadingMapMode), &lensShadingMapMode);
3819    }
3820
3821    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3822        cam_area_t roi;
3823        bool reset = true;
3824        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3825        if (scalerCropSet) {
3826            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3827        }
3828        if (reset) {
3829            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3830                    sizeof(roi), &roi);
3831        }
3832    }
3833
3834    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3835        cam_area_t roi;
3836        bool reset = true;
3837        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3838        if (scalerCropSet) {
3839            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3840        }
3841        if (reset) {
3842            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3843                    sizeof(roi), &roi);
3844        }
3845    }
3846
3847    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3848        cam_area_t roi;
3849        bool reset = true;
3850        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3851        if (scalerCropSet) {
3852            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3853        }
3854        if (reset) {
3855            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3856                    sizeof(roi), &roi);
3857        }
3858    }
3859    return rc;
3860}
3861
3862/*===========================================================================
3863 * FUNCTION   : getJpegSettings
3864 *
3865 * DESCRIPTION: save the jpeg settings in the HAL
3866 *
3867 *
3868 * PARAMETERS :
3869 *   @settings  : frame settings information from framework
3870 *
3871 *
3872 * RETURN     : success: NO_ERROR
3873 *              failure:
3874 *==========================================================================*/
3875int QCamera3HardwareInterface::getJpegSettings
3876                                  (const camera_metadata_t *settings)
3877{
3878    if (mJpegSettings) {
3879        if (mJpegSettings->gps_timestamp) {
3880            free(mJpegSettings->gps_timestamp);
3881            mJpegSettings->gps_timestamp = NULL;
3882        }
3883        if (mJpegSettings->gps_coordinates) {
3884            for (int i = 0; i < 3; i++) {
3885                free(mJpegSettings->gps_coordinates[i]);
3886                mJpegSettings->gps_coordinates[i] = NULL;
3887            }
3888        }
3889        free(mJpegSettings);
3890        mJpegSettings = NULL;
3891    }
3892    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3893    CameraMetadata jpeg_settings;
3894    jpeg_settings = settings;
3895
3896    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3897        mJpegSettings->jpeg_orientation =
3898            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3899    } else {
3900        mJpegSettings->jpeg_orientation = 0;
3901    }
3902    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3903        mJpegSettings->jpeg_quality =
3904            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3905    } else {
3906        mJpegSettings->jpeg_quality = 85;
3907    }
3908    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3909        mJpegSettings->thumbnail_size.width =
3910            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3911        mJpegSettings->thumbnail_size.height =
3912            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3913    } else {
3914        mJpegSettings->thumbnail_size.width = 0;
3915        mJpegSettings->thumbnail_size.height = 0;
3916    }
3917    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3918        for (int i = 0; i < 3; i++) {
3919            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3920            *(mJpegSettings->gps_coordinates[i]) =
3921                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3922        }
3923    } else{
3924       for (int i = 0; i < 3; i++) {
3925            mJpegSettings->gps_coordinates[i] = NULL;
3926        }
3927    }
3928
3929    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3930        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3931        *(mJpegSettings->gps_timestamp) =
3932            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3933    } else {
3934        mJpegSettings->gps_timestamp = NULL;
3935    }
3936
3937    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3938        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3939        for (int i = 0; i < len; i++) {
3940            mJpegSettings->gps_processing_method[i] =
3941                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3942        }
3943        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3944            mJpegSettings->gps_processing_method[len] = '\0';
3945        }
3946    } else {
3947        mJpegSettings->gps_processing_method[0] = '\0';
3948    }
3949
3950    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3951        mJpegSettings->sensor_sensitivity =
3952            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3953    } else {
3954        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3955    }
3956
3957    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3958
3959    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3960        mJpegSettings->lens_focal_length =
3961            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3962    }
3963    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3964        mJpegSettings->exposure_compensation =
3965            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3966    }
3967    mJpegSettings->sharpness = 10; //default value
3968    if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
3969        uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3970        if (edgeMode == ANDROID_EDGE_MODE_OFF) {
3971            mJpegSettings->sharpness = 0;
3972        }
3973    }
3974    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3975    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3976    mJpegSettings->is_jpeg_format = true;
3977    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3978    return 0;
3979}
3980
3981/*===========================================================================
3982 * FUNCTION   : captureResultCb
3983 *
3984 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3985 *
3986 * PARAMETERS :
3987 *   @frame  : frame information from mm-camera-interface
3988 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3989 *   @userdata: userdata
3990 *
3991 * RETURN     : NONE
3992 *==========================================================================*/
3993void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3994                camera3_stream_buffer_t *buffer,
3995                uint32_t frame_number, void *userdata)
3996{
3997    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3998    if (hw == NULL) {
3999        ALOGE("%s: Invalid hw %p", __func__, hw);
4000        return;
4001    }
4002
4003    hw->captureResultCb(metadata, buffer, frame_number);
4004    return;
4005}
4006
4007
4008/*===========================================================================
4009 * FUNCTION   : initialize
4010 *
4011 * DESCRIPTION: Pass framework callback pointers to HAL
4012 *
4013 * PARAMETERS :
4014 *
4015 *
4016 * RETURN     : Success : 0
4017 *              Failure: -ENODEV
4018 *==========================================================================*/
4019
4020int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
4021                                  const camera3_callback_ops_t *callback_ops)
4022{
4023    ALOGV("%s: E", __func__);
4024    QCamera3HardwareInterface *hw =
4025        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4026    if (!hw) {
4027        ALOGE("%s: NULL camera device", __func__);
4028        return -ENODEV;
4029    }
4030
4031    int rc = hw->initialize(callback_ops);
4032    ALOGV("%s: X", __func__);
4033    return rc;
4034}
4035
4036/*===========================================================================
4037 * FUNCTION   : configure_streams
4038 *
4039 * DESCRIPTION:
4040 *
4041 * PARAMETERS :
4042 *
4043 *
4044 * RETURN     : Success: 0
4045 *              Failure: -EINVAL (if stream configuration is invalid)
4046 *                       -ENODEV (fatal error)
4047 *==========================================================================*/
4048
4049int QCamera3HardwareInterface::configure_streams(
4050        const struct camera3_device *device,
4051        camera3_stream_configuration_t *stream_list)
4052{
4053    ALOGV("%s: E", __func__);
4054    QCamera3HardwareInterface *hw =
4055        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4056    if (!hw) {
4057        ALOGE("%s: NULL camera device", __func__);
4058        return -ENODEV;
4059    }
4060    int rc = hw->configureStreams(stream_list);
4061    ALOGV("%s: X", __func__);
4062    return rc;
4063}
4064
4065/*===========================================================================
4066 * FUNCTION   : register_stream_buffers
4067 *
4068 * DESCRIPTION: Register stream buffers with the device
4069 *
4070 * PARAMETERS :
4071 *
4072 * RETURN     :
4073 *==========================================================================*/
4074int QCamera3HardwareInterface::register_stream_buffers(
4075        const struct camera3_device *device,
4076        const camera3_stream_buffer_set_t *buffer_set)
4077{
4078    ALOGV("%s: E", __func__);
4079    QCamera3HardwareInterface *hw =
4080        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4081    if (!hw) {
4082        ALOGE("%s: NULL camera device", __func__);
4083        return -ENODEV;
4084    }
4085    int rc = hw->registerStreamBuffers(buffer_set);
4086    ALOGV("%s: X", __func__);
4087    return rc;
4088}
4089
4090/*===========================================================================
4091 * FUNCTION   : construct_default_request_settings
4092 *
4093 * DESCRIPTION: Configure a settings buffer to meet the required use case
4094 *
4095 * PARAMETERS :
4096 *
4097 *
4098 * RETURN     : Success: Return valid metadata
4099 *              Failure: Return NULL
4100 *==========================================================================*/
4101const camera_metadata_t* QCamera3HardwareInterface::
4102    construct_default_request_settings(const struct camera3_device *device,
4103                                        int type)
4104{
4105
4106    ALOGV("%s: E", __func__);
4107    camera_metadata_t* fwk_metadata = NULL;
4108    QCamera3HardwareInterface *hw =
4109        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4110    if (!hw) {
4111        ALOGE("%s: NULL camera device", __func__);
4112        return NULL;
4113    }
4114
4115    fwk_metadata = hw->translateCapabilityToMetadata(type);
4116
4117    ALOGV("%s: X", __func__);
4118    return fwk_metadata;
4119}
4120
4121/*===========================================================================
4122 * FUNCTION   : process_capture_request
4123 *
4124 * DESCRIPTION:
4125 *
4126 * PARAMETERS :
4127 *
4128 *
4129 * RETURN     :
4130 *==========================================================================*/
4131int QCamera3HardwareInterface::process_capture_request(
4132                    const struct camera3_device *device,
4133                    camera3_capture_request_t *request)
4134{
4135    ALOGV("%s: E", __func__);
4136    QCamera3HardwareInterface *hw =
4137        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4138    if (!hw) {
4139        ALOGE("%s: NULL camera device", __func__);
4140        return -EINVAL;
4141    }
4142
4143    int rc = hw->processCaptureRequest(request);
4144    ALOGV("%s: X", __func__);
4145    return rc;
4146}
4147
4148/*===========================================================================
4149 * FUNCTION   : get_metadata_vendor_tag_ops
4150 *
4151 * DESCRIPTION:
4152 *
4153 * PARAMETERS :
4154 *
4155 *
4156 * RETURN     :
4157 *==========================================================================*/
4158
4159void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
4160                const struct camera3_device *device,
4161                vendor_tag_query_ops_t* ops)
4162{
4163    ALOGV("%s: E", __func__);
4164    QCamera3HardwareInterface *hw =
4165        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4166    if (!hw) {
4167        ALOGE("%s: NULL camera device", __func__);
4168        return;
4169    }
4170
4171    hw->getMetadataVendorTagOps(ops);
4172    ALOGV("%s: X", __func__);
4173    return;
4174}
4175
4176/*===========================================================================
4177 * FUNCTION   : dump
4178 *
4179 * DESCRIPTION:
4180 *
4181 * PARAMETERS :
4182 *
4183 *
4184 * RETURN     :
4185 *==========================================================================*/
4186
4187void QCamera3HardwareInterface::dump(
4188                const struct camera3_device *device, int fd)
4189{
4190    ALOGV("%s: E", __func__);
4191    QCamera3HardwareInterface *hw =
4192        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4193    if (!hw) {
4194        ALOGE("%s: NULL camera device", __func__);
4195        return;
4196    }
4197
4198    hw->dump(fd);
4199    ALOGV("%s: X", __func__);
4200    return;
4201}
4202
4203/*===========================================================================
4204 * FUNCTION   : flush
4205 *
4206 * DESCRIPTION:
4207 *
4208 * PARAMETERS :
4209 *
4210 *
4211 * RETURN     :
4212 *==========================================================================*/
4213
4214int QCamera3HardwareInterface::flush(
4215                const struct camera3_device *device)
4216{
4217    int rc;
4218    ALOGV("%s: E", __func__);
4219    QCamera3HardwareInterface *hw =
4220        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4221    if (!hw) {
4222        ALOGE("%s: NULL camera device", __func__);
4223        return -EINVAL;
4224    }
4225
4226    rc = hw->flush();
4227    ALOGV("%s: X", __func__);
4228    return rc;
4229}
4230
4231/*===========================================================================
4232 * FUNCTION   : close_camera_device
4233 *
4234 * DESCRIPTION:
4235 *
4236 * PARAMETERS :
4237 *
4238 *
4239 * RETURN     :
4240 *==========================================================================*/
4241int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
4242{
4243    ALOGV("%s: E", __func__);
4244    int ret = NO_ERROR;
4245    QCamera3HardwareInterface *hw =
4246        reinterpret_cast<QCamera3HardwareInterface *>(
4247            reinterpret_cast<camera3_device_t *>(device)->priv);
4248    if (!hw) {
4249        ALOGE("NULL camera device");
4250        return BAD_VALUE;
4251    }
4252    delete hw;
4253
4254    pthread_mutex_lock(&mCameraSessionLock);
4255    mCameraSessionActive = 0;
4256    pthread_mutex_unlock(&mCameraSessionLock);
4257    ALOGV("%s: X", __func__);
4258    return ret;
4259}
4260
4261/*===========================================================================
4262 * FUNCTION   : getWaveletDenoiseProcessPlate
4263 *
4264 * DESCRIPTION: query wavelet denoise process plate
4265 *
4266 * PARAMETERS : None
4267 *
4268 * RETURN     : WNR prcocess plate vlaue
4269 *==========================================================================*/
4270cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
4271{
4272    char prop[PROPERTY_VALUE_MAX];
4273    memset(prop, 0, sizeof(prop));
4274    property_get("persist.denoise.process.plates", prop, "0");
4275    int processPlate = atoi(prop);
4276    switch(processPlate) {
4277    case 0:
4278        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
4279    case 1:
4280        return CAM_WAVELET_DENOISE_CBCR_ONLY;
4281    case 2:
4282        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4283    case 3:
4284        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
4285    default:
4286        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
4287    }
4288}
4289
4290/*===========================================================================
4291 * FUNCTION   : needRotationReprocess
4292 *
4293 * DESCRIPTION: if rotation needs to be done by reprocess in pp
4294 *
4295 * PARAMETERS : none
4296 *
4297 * RETURN     : true: needed
4298 *              false: no need
4299 *==========================================================================*/
4300bool QCamera3HardwareInterface::needRotationReprocess()
4301{
4302
4303    if (!mJpegSettings->is_jpeg_format) {
4304        // RAW image, no need to reprocess
4305        return false;
4306    }
4307
4308    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
4309        mJpegSettings->jpeg_orientation > 0) {
4310        // current rotation is not zero, and pp has the capability to process rotation
4311        ALOGD("%s: need do reprocess for rotation", __func__);
4312        return true;
4313    }
4314
4315    return false;
4316}
4317
4318/*===========================================================================
4319 * FUNCTION   : needReprocess
4320 *
4321 * DESCRIPTION: if reprocess in needed
4322 *
4323 * PARAMETERS : none
4324 *
4325 * RETURN     : true: needed
4326 *              false: no need
4327 *==========================================================================*/
4328bool QCamera3HardwareInterface::needReprocess()
4329{
4330    if (!mJpegSettings->is_jpeg_format) {
4331        // RAW image, no need to reprocess
4332        return false;
4333    }
4334
4335    if ((mJpegSettings->min_required_pp_mask > 0) ||
4336         isWNREnabled()) {
4337        // TODO: add for ZSL HDR later
4338        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
4339        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
4340        return true;
4341    }
4342    return needRotationReprocess();
4343}
4344
4345/*===========================================================================
4346 * FUNCTION   : addOnlineReprocChannel
4347 *
4348 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
4349 *              coming from input channel
4350 *
4351 * PARAMETERS :
4352 *   @pInputChannel : ptr to input channel whose frames will be post-processed
4353 *
4354 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
4355 *==========================================================================*/
4356QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
4357              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
4358{
4359    int32_t rc = NO_ERROR;
4360    QCamera3ReprocessChannel *pChannel = NULL;
4361    if (pInputChannel == NULL) {
4362        ALOGE("%s: input channel obj is NULL", __func__);
4363        return NULL;
4364    }
4365
4366    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
4367            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
4368    if (NULL == pChannel) {
4369        ALOGE("%s: no mem for reprocess channel", __func__);
4370        return NULL;
4371    }
4372
4373    // Capture channel, only need snapshot and postview streams start together
4374    mm_camera_channel_attr_t attr;
4375    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4376    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4377    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
4378    rc = pChannel->initialize();
4379    if (rc != NO_ERROR) {
4380        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
4381        delete pChannel;
4382        return NULL;
4383    }
4384
4385    // pp feature config
4386    cam_pp_feature_config_t pp_config;
4387    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
4388    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
4389        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
4390        pp_config.sharpness = mJpegSettings->sharpness;
4391    }
4392
4393    if (isWNREnabled()) {
4394        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
4395        pp_config.denoise2d.denoise_enable = 1;
4396        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
4397    }
4398    if (needRotationReprocess()) {
4399        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
4400        int rotation = mJpegSettings->jpeg_orientation;
4401        if (rotation == 0) {
4402            pp_config.rotation = ROTATE_0;
4403        } else if (rotation == 90) {
4404            pp_config.rotation = ROTATE_90;
4405        } else if (rotation == 180) {
4406            pp_config.rotation = ROTATE_180;
4407        } else if (rotation == 270) {
4408            pp_config.rotation = ROTATE_270;
4409        }
4410    }
4411
4412   rc = pChannel->addReprocStreamsFromSource(pp_config,
4413                                             pInputChannel,
4414                                             mMetadataChannel);
4415
4416    if (rc != NO_ERROR) {
4417        delete pChannel;
4418        return NULL;
4419    }
4420    return pChannel;
4421}
4422
// Maximum number of unmatched frames a channel may queue; taken from the
// minimum post-processing buffer count advertised in the capability table.
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
4427
// Whether wavelet noise reduction is available, straight from the
// per-camera capability table.
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
4431
4432}; //end namespace qcamera
4433