// QCamera3HWI.cpp revision fbb2906a9a77b06e924b1404f6f7977f2990eacd
1/* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34#include <cutils/properties.h>
35#include <hardware/camera3.h>
36#include <camera/CameraMetadata.h>
37#include <stdlib.h>
38#include <fcntl.h>
39#include <stdint.h>
40#include <utils/Log.h>
41#include <utils/Errors.h>
42#include <ui/Fence.h>
43#include <gralloc_priv.h>
44#include "QCamera3HWI.h"
45#include "QCamera3Mem.h"
46#include "QCamera3Channel.h"
47#include "QCamera3PostProc.h"
48
49using namespace android;
50
51namespace qcamera {
52
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand to fetch the mapped pointer for buffer INDEX from a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability tables, indexed by camera id; written during probe
// (outside this chunk) and read throughout this file (e.g. the constructor
// and configureStreams()).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static camera_metadata blobs, one per camera id.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes open/close across the process; openCamera() uses these to
// enforce a single active camera session at a time.
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
62
// Translation tables between Android framework metadata enum values and the
// backend (cam_*) enum values. Each table is a flat array of
// { framework_value, hal_value } pairs.

// ANDROID_CONTROL_EFFECT_MODE_* -> CAM_EFFECT_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// ANDROID_CONTROL_AWB_MODE_* -> CAM_WB_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// ANDROID_CONTROL_SCENE_MODE_* -> CAM_SCENE_MODE_*. FACE_PRIORITY has no
// backend scene equivalent and maps to CAM_SCENE_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// ANDROID_CONTROL_AF_MODE_* -> CAM_FOCUS_MODE_*. AF_MODE_OFF maps to FIXED
// focus on this backend.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// ANDROID_CONTROL_AE_ANTIBANDING_MODE_* -> CAM_ANTIBANDING_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> flash behavior: both OFF and ON (no flash) map to
// CAM_FLASH_MODE_OFF; the REDEYE variant reuses AUTO flash.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// ANDROID_FLASH_MODE_* -> CAM_FLASH_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Face detect: only OFF and FULL are mapped (no SIMPLE entry here).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Lens focus-distance calibration quality mapping.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// {0, 0} entry presumably denotes "no thumbnail" — standard for this tag.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// ANDROID_SENSOR_TEST_PATTERN_MODE_* -> CAM_TEST_PATTERN_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
/* Custom (vendor) tag definitions exported through
 * get_metadata_vendor_tag_ops. Currently a single vendor section,
 * "org.codeaurora.qcamera3.privatedata", with one BYTE tag. */

// QCamera3 camera metadata sections; vendor sections start at VENDOR_SECTION.
enum qcamera3_ext_section {
    QCAMERA3_PRIVATEDATA = VENDOR_SECTION,
    QCAMERA3_SECTIONS_END
};

const int QCAMERA3_SECTION_COUNT = QCAMERA3_SECTIONS_END - VENDOR_SECTION;

// Tag ids encode the section in the upper 16 bits (section << 16).
enum qcamera3_ext_section_ranges {
    QCAMERA3_PRIVATEDATA_START = QCAMERA3_PRIVATEDATA << 16
};

enum qcamera3_ext_tags {
    QCAMERA3_PRIVATEDATA_REPROCESS = QCAMERA3_PRIVATEDATA_START,
    QCAMERA3_PRIVATEDATA_END
};

// One-past-the-last tag id per section, indexed by (section - VENDOR_SECTION).
enum qcamera3_ext_tags qcamera3_ext3_section_bounds[QCAMERA3_SECTIONS_END -
    VENDOR_SECTION] = {
        QCAMERA3_PRIVATEDATA_END
} ;

// Name/type descriptor for a single vendor tag.
typedef struct vendor_tag_info {
    const char *tag_name;
    uint8_t     tag_type;
} vendor_tag_info_t;

// Section names, indexed by (section - VENDOR_SECTION).
const char *qcamera3_ext_section_names[QCAMERA3_SECTIONS_END -
        VENDOR_SECTION] = {
    "org.codeaurora.qcamera3.privatedata"
};

// Tag descriptors for the privatedata section.
vendor_tag_info_t qcamera3_privatedata[QCAMERA3_PRIVATEDATA_END - QCAMERA3_PRIVATEDATA_START] = {
    { "privatedata", TYPE_BYTE }
};

// Per-section tag descriptor arrays, indexed by (section - VENDOR_SECTION).
vendor_tag_info_t *qcamera3_tag_info[QCAMERA3_SECTIONS_END -
        VENDOR_SECTION] = {
    qcamera3_privatedata
};
207
// HAL3 device ops vtable handed to the framework through mCameraDevice.ops
// (set in the constructor). Uses GNU-style designated initializers, which
// this codebase builds with.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    // register_stream_buffers is deprecated as of device API v3.2
    // (CAMERA_DEVICE_API_VERSION_3_2 is advertised in the constructor).
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};

// Cap on concurrently in-flight capture requests; presumably enforced in
// process_capture_request (not visible in this chunk) — TODO confirm.
int QCamera3HardwareInterface::kMaxInFlight = 5;
221
222/*===========================================================================
223 * FUNCTION   : QCamera3HardwareInterface
224 *
225 * DESCRIPTION: constructor of QCamera3HardwareInterface
226 *
227 * PARAMETERS :
228 *   @cameraId  : camera ID
229 *
230 * RETURN     : none
231 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mLoopBackResult(NULL),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mHdrHint(false),
      mMetaFrameCount(0)
{
    // Fill in the camera3_device_t exposed to the framework; priv carries
    // this instance back into the static op trampolines.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced unconditionally;
    // assumes capabilities were populated before construction — verify callers.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start them all empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL module is optional; failure to load it is non-fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
277
278/*===========================================================================
279 * FUNCTION   : ~QCamera3HardwareInterface
280 *
281 * DESCRIPTION: destructor of QCamera3HardwareInterface
282 *
283 * PARAMETERS : none
284 *
285 * RETURN     : none
286 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework

    // Pass 1: stop every channel before any is deleted, so no channel keeps
    // streaming while a sibling is being torn down.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }

    // Pass 2: delete the channels and free the stream_info_t records
    // (allocated with malloc in configureStreams, hence free()).
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }

    // mPictureChannel was owned via mStreamInfo above; just clear the alias.
    mPictureChannel = NULL;

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (mMetadataChannel) {
            mMetadataChannel->stop();
            delete mMetadataChannel;
            mMetadataChannel = NULL;
        }
        // Releases the parameter heap set up in initialize()/initParameters().
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingRequestsList.clear();

    // Free any lazily-built default request templates.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}
337
338/*===========================================================================
339 * FUNCTION   : openCamera
340 *
341 * DESCRIPTION: open camera
342 *
343 * PARAMETERS :
344 *   @hw_device  : double ptr for camera device struct
345 *
346 * RETURN     : int32_t type of status
347 *              NO_ERROR  -- success
348 *              none-zero failure code
349 *==========================================================================*/
350int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
351{
352    int rc = 0;
353    pthread_mutex_lock(&mCameraSessionLock);
354    if (mCameraSessionActive) {
355        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
356        pthread_mutex_unlock(&mCameraSessionLock);
357        return -EUSERS;
358    }
359
360    if (mCameraOpened) {
361        *hw_device = NULL;
362        return PERMISSION_DENIED;
363    }
364
365    rc = openCamera();
366    if (rc == 0) {
367        *hw_device = &mCameraDevice.common;
368        mCameraSessionActive = 1;
369    } else
370        *hw_device = NULL;
371
372#ifdef HAS_MULTIMEDIA_HINTS
373    if (rc == 0) {
374        if (m_pPowerModule) {
375            if (m_pPowerModule->powerHint) {
376                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
377                        (void *)"state=1");
378            }
379        }
380    }
381#endif
382    pthread_mutex_unlock(&mCameraSessionLock);
383    return rc;
384}
385
386/*===========================================================================
387 * FUNCTION   : openCamera
388 *
389 * DESCRIPTION: open camera
390 *
391 * PARAMETERS : none
392 *
393 * RETURN     : int32_t type of status
394 *              NO_ERROR  -- success
395 *              none-zero failure code
396 *==========================================================================*/
397int QCamera3HardwareInterface::openCamera()
398{
399    if (mCameraHandle) {
400        ALOGE("Failure: Camera already opened");
401        return ALREADY_EXISTS;
402    }
403    mCameraHandle = camera_open(mCameraId);
404    if (!mCameraHandle) {
405        ALOGE("camera_open failed.");
406        return UNKNOWN_ERROR;
407    }
408
409    mCameraOpened = true;
410
411    return NO_ERROR;
412}
413
414/*===========================================================================
415 * FUNCTION   : closeCamera
416 *
417 * DESCRIPTION: close camera
418 *
419 * PARAMETERS : none
420 *
421 * RETURN     : int32_t type of status
422 *              NO_ERROR  -- success
423 *              none-zero failure code
424 *==========================================================================*/
425int QCamera3HardwareInterface::closeCamera()
426{
427    int rc = NO_ERROR;
428
429    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
430    mCameraHandle = NULL;
431    mCameraOpened = false;
432
433#ifdef HAS_MULTIMEDIA_HINTS
434    if (rc == NO_ERROR) {
435        if (m_pPowerModule) {
436            if (m_pPowerModule->powerHint) {
437                if(mHdrHint == true) {
438                    m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
439                            (void *)"state=3");
440                    mHdrHint = false;
441                }
442                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
443                        (void *)"state=0");
444            }
445        }
446    }
447#endif
448
449    return rc;
450}
451
452/*===========================================================================
453 * FUNCTION   : initialize
454 *
455 * DESCRIPTION: Initialize frameworks callback functions
456 *
457 * PARAMETERS :
458 *   @callback_ops : callback function to frameworks
459 *
460 * RETURN     :
461 *
462 *==========================================================================*/
463int QCamera3HardwareInterface::initialize(
464        const struct camera3_callback_ops *callback_ops)
465{
466    int rc;
467
468    pthread_mutex_lock(&mMutex);
469
470    rc = initParameters();
471    if (rc < 0) {
472        ALOGE("%s: initParamters failed %d", __func__, rc);
473       goto err1;
474    }
475    mCallbackOps = callback_ops;
476
477    pthread_mutex_unlock(&mMutex);
478    mCameraInitialized = true;
479    return 0;
480
481err1:
482    pthread_mutex_unlock(&mMutex);
483    return rc;
484}
485
486/*===========================================================================
487 * FUNCTION   : configureStreams
488 *
489 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
490 *              and output streams.
491 *
492 * PARAMETERS :
493 *   @stream_list : streams to be configured
494 *
495 * RETURN     :
496 *
497 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;

    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        return BAD_VALUE;
    }
    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        return BAD_VALUE;
    }

    /* first invalidate all the streams in the mStreamInfo list;
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        mMetadataChannel->stop();
    }

#ifdef HAS_MULTIMEDIA_HINTS
    // Withdraw a pending HDR/callback-stream power hint from the previous
    // configuration; it is re-issued below if a YCbCr_420_888 stream recurs.
    if(mHdrHint == true) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=3");
                mHdrHint = false;
            }
        }
    }
#endif

    pthread_mutex_lock(&mMutex);

    bool isZsl = false;
    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    cam_stream_size_info_t stream_config_info;

    // Pass 1: reconcile the incoming stream set with mStreamInfo, and find
    // the (single) input stream and any JPEG (BLOB) stream.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                // Recurring stream: drop its old channel; a fresh one is
                // built in pass 2 (newStream->priv becomes NULL).
                delete channel;
                (*it)->status = VALID;
                (*it)->stream->priv = NULL;
                (*it)->channel = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            stream_info_t* stream_info;
            // NOTE(review): malloc return value is not checked — a failed
            // allocation is dereferenced immediately. Freed with free() in
            // the destructor / cleanAndSortStreamInfo.
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->channel = NULL;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    cleanAndSortStreamInfo();
    // The metadata channel is rebuilt from scratch on every configure.
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* Pass 2: allocate channel objects for the requested streams and fill
     * stream_config_info (size + backend stream type per index).
     * NOTE(review): stream_sizes/type are indexed by i up to num_streams with
     * no bound check against the cam_stream_size_info_t array capacity —
     * confirm the framework caps num_streams appropriately. */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        stream_config_info.stream_sizes[i].width = newStream->width;
        stream_config_info.stream_sizes[i].height = newStream->height;
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
            //for zsl stream the size is active array size
            isZsl = true;
            stream_config_info.stream_sizes[i].width =
                    gCamCapability[mCameraId]->active_array_size.width;
            stream_config_info.stream_sizes[i].height =
                    gCamCapability[mCameraId]->active_array_size.height;
            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
        } else {
           //for non zsl streams find out the format
           switch (newStream->format) {
           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
              {
                 // Video-encoder consumers get the VIDEO path; everything
                 // else implementation-defined is treated as preview.
                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
                 } else {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
                 }
              }
              break;
           case HAL_PIXEL_FORMAT_YCbCr_420_888:
              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
#ifdef HAS_MULTIMEDIA_HINTS
              // Callback streams raise a power hint ("state=2"), undone at
              // the next configure or at close ("state=3").
              if (m_pPowerModule) {
                  if (m_pPowerModule->powerHint) {
                      m_pPowerModule->powerHint(m_pPowerModule,
                          POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
                      mHdrHint = true;
                  }
              }
#endif
              break;
           case HAL_PIXEL_FORMAT_BLOB:
              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
              break;
           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
           case HAL_PIXEL_FORMAT_RAW16:
              stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
              break;
           default:
              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
              break;
           }
        }
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // First pick gralloc usage flags based on the stream direction.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage =
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else
                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel = NULL;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info,
                            this,
                            newStream,
                            (cam_stream_type_t) stream_config_info.type[i]);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    // stream->priv carries the owning channel for later passes.
                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
                case HAL_PIXEL_FORMAT_RAW16:
                    newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
                    mRawChannel = new QCamera3RawChannel(
                            mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info,
                            this, newStream, (newStream->format == HAL_PIXEL_FORMAT_RAW16));
                    if (mRawChannel == NULL) {
                        ALOGE("%s: allocation of raw channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = (QCamera3Channel*)mRawChannel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }

            // Mirror the channel pointer into the matching mStreamInfo record.
            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                    it != mStreamInfo.end(); it++) {
                if ((*it)->stream == newStream) {
                    (*it)->channel = (QCamera3Channel*) newStream->priv;
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }

    // NOTE(review): isZsl implies jpegStream != NULL, so mPictureChannel
    // should have been created above — but that holds only if the BLOB
    // stream's priv was NULL this pass; confirm recurring-BLOB behavior.
    if (isZsl)
        mPictureChannel->overrideYuvSize(
                gCamCapability[mCameraId]->active_array_size.width,
                gCamCapability[mCameraId]->active_array_size.height);

    int32_t hal_version = CAM_HAL_V3;
    stream_config_info.num_streams = streamList->num_streams;

    // settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(metadata_buffer_t));

    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);

    AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
                sizeof(stream_config_info), &stream_config_info);

    // Push the HAL version + stream table down to the backend.
    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();
    mPendingFrameDropList.clear();
    // Initialize/Reset the pending buffers list
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();

    mFirstRequest = true;

    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}
800
801/*===========================================================================
802 * FUNCTION   : validateCaptureRequest
803 *
804 * DESCRIPTION: validate a capture request from camera service
805 *
806 * PARAMETERS :
807 *   @request : request from framework to process
808 *
809 * RETURN     :
810 *
811 *==========================================================================*/
812int QCamera3HardwareInterface::validateCaptureRequest(
813                    camera3_capture_request_t *request)
814{
815    ssize_t idx = 0;
816    const camera3_stream_buffer_t *b;
817    CameraMetadata meta;
818
819    /* Sanity check the request */
820    if (request == NULL) {
821        ALOGE("%s: NULL capture request", __func__);
822        return BAD_VALUE;
823    }
824
825    if (request->settings == NULL && mFirstRequest) {
826        /*settings cannot be null for the first request*/
827        return BAD_VALUE;
828    }
829
830    uint32_t frameNumber = request->frame_number;
831    if (request->input_buffer != NULL &&
832            request->input_buffer->stream != mInputStream) {
833        ALOGE("%s: Request %d: Input buffer not from input stream!",
834                __FUNCTION__, frameNumber);
835        return BAD_VALUE;
836    }
837    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
838        ALOGE("%s: Request %d: No output buffers provided!",
839                __FUNCTION__, frameNumber);
840        return BAD_VALUE;
841    }
842    if (request->input_buffer != NULL) {
843        b = request->input_buffer;
844        QCamera3Channel *channel =
845            static_cast<QCamera3Channel*>(b->stream->priv);
846        if (channel == NULL) {
847            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
848                    __func__, frameNumber, idx);
849            return BAD_VALUE;
850        }
851        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
852            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
853                    __func__, frameNumber, idx);
854            return BAD_VALUE;
855        }
856        if (b->release_fence != -1) {
857            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
858                    __func__, frameNumber, idx);
859            return BAD_VALUE;
860        }
861        if (b->buffer == NULL) {
862            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
863                    __func__, frameNumber, idx);
864            return BAD_VALUE;
865        }
866    }
867
868    // Validate all buffers
869    b = request->output_buffers;
870    do {
871        QCamera3Channel *channel =
872                static_cast<QCamera3Channel*>(b->stream->priv);
873        if (channel == NULL) {
874            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
875                    __func__, frameNumber, idx);
876            return BAD_VALUE;
877        }
878        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
879            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
880                    __func__, frameNumber, idx);
881            return BAD_VALUE;
882        }
883        if (b->release_fence != -1) {
884            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
885                    __func__, frameNumber, idx);
886            return BAD_VALUE;
887        }
888        if (b->buffer == NULL) {
889            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
890                    __func__, frameNumber, idx);
891            return BAD_VALUE;
892        }
893        idx++;
894        b = request->output_buffers + idx;
895    } while (idx < (ssize_t)request->num_output_buffers);
896
897    return NO_ERROR;
898}
899
900/*===========================================================================
901 * FUNCTION   : deriveMinFrameDuration
902 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
904 *              on currently configured streams.
905 *
906 * PARAMETERS : NONE
907 *
908 * RETURN     : NONE
909 *
910 *==========================================================================*/
911void QCamera3HardwareInterface::deriveMinFrameDuration()
912{
913    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
914
915    maxJpegDim = 0;
916    maxProcessedDim = 0;
917    maxRawDim = 0;
918
919    // Figure out maximum jpeg, processed, and raw dimensions
920    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
921        it != mStreamInfo.end(); it++) {
922
923        // Input stream doesn't have valid stream_type
924        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
925            continue;
926
927        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
928        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
929            if (dimension > maxJpegDim)
930                maxJpegDim = dimension;
931        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
932                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
933            if (dimension > maxRawDim)
934                maxRawDim = dimension;
935        } else {
936            if (dimension > maxProcessedDim)
937                maxProcessedDim = dimension;
938        }
939    }
940
941    //Assume all jpeg dimensions are in processed dimensions.
942    if (maxJpegDim > maxProcessedDim)
943        maxProcessedDim = maxJpegDim;
944    //Find the smallest raw dimension that is greater or equal to jpeg dimension
945    if (maxProcessedDim > maxRawDim) {
946        maxRawDim = INT32_MAX;
947        for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
948            i++) {
949
950            int32_t dimension =
951                gCamCapability[mCameraId]->raw_dim[i].width *
952                gCamCapability[mCameraId]->raw_dim[i].height;
953
954            if (dimension >= maxProcessedDim && dimension < maxRawDim)
955                maxRawDim = dimension;
956        }
957    }
958
959    //Find minimum durations for processed, jpeg, and raw
960    for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
961            i++) {
962        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
963                gCamCapability[mCameraId]->raw_dim[i].height) {
964            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
965            break;
966        }
967    }
968    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
969        if (maxProcessedDim ==
970            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
971            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
972            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
973            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
974            break;
975        }
976    }
977}
978
979/*===========================================================================
980 * FUNCTION   : getMinFrameDuration
981 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
988 *
989 *==========================================================================*/
990int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
991{
992    bool hasJpegStream = false;
993    bool hasRawStream = false;
994    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
995        const camera3_stream_t *stream = request->output_buffers[i].stream;
996        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
997            hasJpegStream = true;
998        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
999                stream->format == HAL_PIXEL_FORMAT_RAW16)
1000            hasRawStream = true;
1001    }
1002
1003    if (!hasJpegStream)
1004        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
1005    else
1006        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
1007}
1008
1009/*===========================================================================
1010 * FUNCTION   : handleMetadataWithLock
1011 *
1012 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
1013 *
1014 * PARAMETERS : @metadata_buf: metadata buffer
1015 *
1016 * RETURN     :
1017 *
1018 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf)
{
    // Unpack the fields of interest from the incoming metadata buffer.
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    // Whether this buffer carries a valid "normal" (full-result) frame number.
    int32_t frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    // Requests still pending in the backend; consulted at done_metadata to
    // decide whether a blocked process_capture_request may be released.
    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
        CAM_INTF_META_PENDING_REQUESTS, metadata);
    uint32_t frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
    // Sensor timestamp, converted from struct timeval to nanoseconds.
    const struct timeval *tv = (const struct timeval *)
        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
        tv->tv_usec * NSEC_PER_USEC;
    // Per-stream frame-drop report for this frame.
    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);

    // "Urgent" frame number: carries early (partial) 3A results ahead of
    // the full result for that frame.
    int32_t urgent_frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t urgent_frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if (urgent_frame_number_valid) {
        ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using HAL3.1 quirk for partial results
        for (List<PendingRequestInfo>::iterator i =
            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // Older pending requests that were never notified get a shutter
            // notification with a timestamp extrapolated backwards by 33ms
            // per frame from the urgent frame's capture time.
            if (i->frame_number < urgent_frame_number &&
                i->bNotified == 0) {
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time -
                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                i->timestamp = notify_msg.message.shutter.timestamp;
                i->bNotified = 1;
                ALOGV("%s: Dummy notification !!!! notify frame_number = %d, capture_time = %lld",
                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
            }

            // The urgent frame itself: real shutter notify plus a partial
            // (3A-only) capture result with no output buffers.
            if (i->frame_number == urgent_frame_number) {

                camera3_capture_result_t result;

                // Send shutter notify to frameworks
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time;
                mCallbackOps->notify(mCallbackOps, &notify_msg);

                i->timestamp = capture_time;
                i->bNotified = 1;

                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // A buffer without a valid normal frame number is only a start-of-frame
    // marker: return it to the metadata channel and skip result delivery.
    if (!frame_number_valid) {
        ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        goto done_metadata;
    }
    ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
            frame_number, capture_time);

    // Go through the pending requests info and send shutter/results to frameworks
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        camera3_capture_result_t result;
        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

        // Flush out all entries with less or equal frame numbers.
        mPendingRequest--;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then notify ERROR_BUFFER for the corresponding stream and
        // buffer with CAMERA3_BUFFER_STATUS_ERROR
        if (cam_frame_drop.frame_dropped) {
            camera3_notify_msg_t notify_msg;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                // Compare this request's streams against the dropped-stream
                // IDs reported by the backend.
                for (uint32_t k=0; k<cam_frame_drop.cam_stream_ID.num_streams; k++) {
                  if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
                      // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                      ALOGV("%s: Start of reporting error frame#=%d, streamID=%d",
                             __func__, i->frame_number, streamID);
                      notify_msg.type = CAMERA3_MSG_ERROR;
                      notify_msg.message.error.frame_number = i->frame_number;
                      notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                      notify_msg.message.error.error_stream = j->stream;
                      mCallbackOps->notify(mCallbackOps, &notify_msg);
                      ALOGV("%s: End of reporting error frame#=%d, streamID=%d",
                             __func__, i->frame_number, streamID);
                      PendingFrameDropInfo PendingFrameDrop;
                      PendingFrameDrop.frame_number=i->frame_number;
                      PendingFrameDrop.stream_ID = streamID;
                      // Add the Frame drop info to mPendingFrameDropList
                      mPendingFrameDropList.push_back(PendingFrameDrop);
                  }
                }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        if (i->frame_number < frame_number) {
            // Dropped metadata: synthesize a minimal result (timestamp and
            // request id only) so the framework still sees every frame.
            CameraMetadata dummyMetadata;
            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                    &i->timestamp, 1);
            dummyMetadata.update(ANDROID_REQUEST_ID,
                    &(i->request_id), 1);
            result.result = dummyMetadata.release();
        } else {
            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->blob_request);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }

                //If it is a blob request then send the metadata to the picture channel
                // The copy is owned by the picture channel after queueing.
                metadata_buffer_t *reproc_meta =
                        (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
                if (reproc_meta == NULL) {
                    ALOGE("%s: Failed to allocate memory for reproc data.", __func__);
                    goto done_metadata;
                }
                *reproc_meta = *metadata;
                mPictureChannel->queueReprocMetadata(reproc_meta);
            }
            // Return metadata buffer
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the output buffers already cached for this request.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer with STATUS_ERROR if its stream/frame
                    // pair was previously recorded as dropped.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove the buffer from the pending-buffers accounting
                    // now that it is being returned to the framework.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        ALOGV("%s: Found buffer %p in pending buffer List "
                              "for frame %d, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                    __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // No buffers cached yet: deliver the metadata-only result.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list
        i = mPendingRequestsList.erase(i);
    }

done_metadata:
    if (!pending_requests)
        unblockRequestIfNecessary();

}
1263
1264/*===========================================================================
1265 * FUNCTION   : handleBufferWithLock
1266 *
1267 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1268 *
1269 * PARAMETERS : @buffer: image buffer for the callback
1270 *              @frame_number: frame number of the image buffer
1271 *
1272 * RETURN     :
1273 *
1274 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        // Build a buffer-only result (no metadata) for immediate delivery.
        camera3_capture_result_t result;
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // If this stream/frame pair was recorded as dropped, flag the buffer
        // with STATUS_ERROR and retire the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        ALOGV("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Remove the buffer from the pending-buffers accounting before
        // returning it to the framework.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                ALOGV("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer_present) {
            // Reprocess request: return the buffer right away and retire the
            // pending request, then let a blocked capture request proceed.
            camera3_capture_result result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            i = mPendingRequestsList.erase(i);
            mPendingRequest--;
            unblockRequestIfNecessary();
        } else {
            // Metadata not yet arrived: cache a copy of the buffer against
            // its stream so handleMetadataWithLock can deliver it later.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        // NOTE(review): malloc result is not checked before
                        // the dereference below — confirm whether an OOM
                        // guard is needed here.
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
1359
1360/*===========================================================================
1361 * FUNCTION   : unblockRequestIfNecessary
1362 *
1363 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1364 *              that mMutex is held when this function is called.
1365 *
1366 * PARAMETERS :
1367 *
1368 * RETURN     :
1369 *
1370 *==========================================================================*/
1371void QCamera3HardwareInterface::unblockRequestIfNecessary()
1372{
1373    bool max_buffers_dequeued = false;
1374
1375    uint32_t queued_buffers = 0;
1376    for(List<stream_info_t*>::iterator it=mStreamInfo.begin();
1377        it != mStreamInfo.end(); it++) {
1378        queued_buffers = 0;
1379        for (List<PendingBufferInfo>::iterator k =
1380            mPendingBuffersMap.mPendingBufferList.begin();
1381            k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
1382            if (k->stream == (*it)->stream)
1383                queued_buffers++;
1384
1385            ALOGV("%s: Dequeued %d buffers for stream %p", __func__,
1386                queued_buffers, (*it)->stream);
1387            if (queued_buffers >=(* it)->stream->max_buffers) {
1388                ALOGV("%s: Wait!!! Max buffers Dequed", __func__);
1389                max_buffers_dequeued = true;
1390                break;
1391            }
1392        }
1393    }
1394
1395    if (!max_buffers_dequeued) {
1396        // Unblock process_capture_request
1397        pthread_cond_signal(&mRequestCond);
1398    }
1399}
1400
1401/*===========================================================================
1402 * FUNCTION   : registerStreamBuffers
1403 *
1404 * DESCRIPTION: Register buffers for a given stream with the HAL device.
1405 *
1406 * PARAMETERS :
1407 *   @stream_list : streams to be configured
1408 *
1409 * RETURN     :
1410 *
1411 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t * /*buffer_set*/)
{
    // Deprecated entry point in the camera3 HAL; kept only to satisfy the
    // interface. Buffers are instead registered lazily on the first capture
    // request (see processCaptureRequest(), which calls
    // channel->registerBuffer() on each output buffer).
    return NO_ERROR;
}
1418
1419/*===========================================================================
1420 * FUNCTION   : processCaptureRequest
1421 *
1422 * DESCRIPTION: process a capture request from camera service
1423 *
1424 * PARAMETERS :
1425 *   @request : request from framework to process
1426 *
1427 * RETURN     :
1428 *
1429 *==========================================================================*/
1430int QCamera3HardwareInterface::processCaptureRequest(
1431                    camera3_capture_request_t *request)
1432{
1433    int rc = NO_ERROR;
1434    int32_t request_id;
1435    CameraMetadata meta;
1436
1437    pthread_mutex_lock(&mMutex);
1438
1439    rc = validateCaptureRequest(request);
1440    if (rc != NO_ERROR) {
1441        ALOGE("%s: incoming request is not valid", __func__);
1442        pthread_mutex_unlock(&mMutex);
1443        return rc;
1444    }
1445
1446    meta = request->settings;
1447
1448    // For first capture request, send capture intent, and
1449    // stream on all streams
1450    if (mFirstRequest) {
1451
1452        for (size_t i = 0; i < request->num_output_buffers; i++) {
1453            const camera3_stream_buffer_t& output = request->output_buffers[i];
1454            QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1455            rc = channel->registerBuffer(output.buffer);
1456            if (rc < 0) {
1457                ALOGE("%s: registerBuffer failed",
1458                        __func__);
1459                pthread_mutex_unlock(&mMutex);
1460                return -ENODEV;
1461            }
1462        }
1463
1464        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1465            int32_t hal_version = CAM_HAL_V3;
1466            uint8_t captureIntent =
1467                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1468
1469            memset(mParameters, 0, sizeof(metadata_buffer_t));
1470            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1471            AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1472                sizeof(hal_version), &hal_version);
1473            AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1474                sizeof(captureIntent), &captureIntent);
1475            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1476                mParameters);
1477        }
1478
1479        ALOGD("%s: Start META Channel", __func__);
1480        mMetadataChannel->start();
1481
1482        //First initialize all streams
1483        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1484            it != mStreamInfo.end(); it++) {
1485            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1486            rc = channel->initialize();
1487            if (NO_ERROR != rc) {
1488                ALOGE("%s : Channel initialization failed %d", __func__, rc);
1489                mMetadataChannel->stop();
1490                pthread_mutex_unlock(&mMutex);
1491                return rc;
1492            }
1493        }
1494        //Then start them.
1495        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1496            it != mStreamInfo.end(); it++) {
1497            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1498            ALOGD("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
1499            channel->start();
1500        }
1501    }
1502
1503    uint32_t frameNumber = request->frame_number;
1504    cam_stream_ID_t streamID;
1505
1506    if (meta.exists(ANDROID_REQUEST_ID)) {
1507        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1508        mCurrentRequestId = request_id;
1509        ALOGV("%s: Received request with id: %d",__func__, request_id);
1510    } else if (mFirstRequest || mCurrentRequestId == -1){
1511        ALOGE("%s: Unable to find request id field, \
1512                & no previous id available", __func__);
1513        return NAME_NOT_FOUND;
1514    } else {
1515        ALOGV("%s: Re-using old request id", __func__);
1516        request_id = mCurrentRequestId;
1517    }
1518
1519    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1520                                    __func__, __LINE__,
1521                                    request->num_output_buffers,
1522                                    request->input_buffer,
1523                                    frameNumber);
1524    // Acquire all request buffers first
1525    streamID.num_streams = 0;
1526    int blob_request = 0;
1527    for (size_t i = 0; i < request->num_output_buffers; i++) {
1528        const camera3_stream_buffer_t& output = request->output_buffers[i];
1529        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1530        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1531
1532        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1533            //Call function to store local copy of jpeg data for encode params.
1534            blob_request = 1;
1535        }
1536
1537        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1538        if (rc != OK) {
1539            ALOGE("%s: fence wait failed %d", __func__, rc);
1540            pthread_mutex_unlock(&mMutex);
1541            return rc;
1542        }
1543
1544        streamID.streamID[streamID.num_streams] =
1545            channel->getStreamID(channel->getStreamTypeMask());
1546        streamID.num_streams++;
1547    }
1548
1549    if(request->input_buffer == NULL) {
1550       rc = setFrameParameters(request, streamID);
1551        if (rc < 0) {
1552            ALOGE("%s: fail to set frame parameters", __func__);
1553            pthread_mutex_unlock(&mMutex);
1554            return rc;
1555        }
1556    }
1557
1558    /* Update pending request list and pending buffers map */
1559    PendingRequestInfo pendingRequest;
1560    pendingRequest.frame_number = frameNumber;
1561    pendingRequest.num_buffers = request->num_output_buffers;
1562    pendingRequest.request_id = request_id;
1563    pendingRequest.blob_request = blob_request;
1564    pendingRequest.bNotified = 0;
1565    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1566
1567    for (size_t i = 0; i < request->num_output_buffers; i++) {
1568        RequestedBufferInfo requestedBuf;
1569        requestedBuf.stream = request->output_buffers[i].stream;
1570        requestedBuf.buffer = NULL;
1571        pendingRequest.buffers.push_back(requestedBuf);
1572
1573        // Add to buffer handle the pending buffers list
1574        PendingBufferInfo bufferInfo;
1575        bufferInfo.frame_number = frameNumber;
1576        bufferInfo.buffer = request->output_buffers[i].buffer;
1577        bufferInfo.stream = request->output_buffers[i].stream;
1578        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
1579        mPendingBuffersMap.num_buffers++;
1580        ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1581          __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
1582          bufferInfo.stream->format);
1583    }
1584    ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1585          __func__, mPendingBuffersMap.num_buffers);
1586    mPendingRequestsList.push_back(pendingRequest);
1587
1588    // Notify metadata channel we receive a request
1589    mMetadataChannel->request(NULL, frameNumber);
1590
1591    // Call request on other streams
1592    for (size_t i = 0; i < request->num_output_buffers; i++) {
1593        const camera3_stream_buffer_t& output = request->output_buffers[i];
1594        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1595        mm_camera_buf_def_t *pInputBuffer = NULL;
1596
1597        if (channel == NULL) {
1598            ALOGE("%s: invalid channel pointer for stream", __func__);
1599            continue;
1600        }
1601
1602        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1603            QCamera3RegularChannel* inputChannel = NULL;
1604            if(request->input_buffer != NULL){
1605
1606                //Try to get the internal format
1607                inputChannel = (QCamera3RegularChannel*)
1608                    request->input_buffer->stream->priv;
1609                if(inputChannel == NULL ){
1610                    ALOGE("%s: failed to get input channel handle", __func__);
1611                } else {
1612                    pInputBuffer =
1613                        inputChannel->getInternalFormatBuffer(
1614                                request->input_buffer->buffer);
1615                    ALOGD("%s: Input buffer dump",__func__);
1616                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1617                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1618                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1619                    ALOGD("Handle:%p", request->input_buffer->buffer);
1620                }
1621                rc = channel->request(output.buffer, frameNumber,
1622                            pInputBuffer, mParameters);
1623                if (rc < 0) {
1624                    ALOGE("%s: Fail to request on picture channel", __func__);
1625                    pthread_mutex_unlock(&mMutex);
1626                    return rc;
1627                }
1628
1629                rc = setReprocParameters(request);
1630                if (rc < 0) {
1631                    ALOGE("%s: fail to set reproc parameters", __func__);
1632                    pthread_mutex_unlock(&mMutex);
1633                    return rc;
1634                }
1635            } else
1636                rc = channel->request(output.buffer, frameNumber,
1637                            NULL, mParameters);
1638        } else {
1639            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1640                __LINE__, output.buffer, frameNumber);
1641           rc = channel->request(output.buffer, frameNumber);
1642        }
1643        if (rc < 0)
1644            ALOGE("%s: request failed", __func__);
1645    }
1646
1647    mFirstRequest = false;
1648    // Added a timed condition wait
1649    struct timespec ts;
1650    uint8_t isValidTimeout = 1;
1651    rc = clock_gettime(CLOCK_REALTIME, &ts);
1652    if (rc < 0) {
1653        isValidTimeout = 0;
1654        ALOGE("%s: Error reading the real time clock!!", __func__);
1655    }
1656    else {
1657        // Make timeout as 5 sec for request to be honored
1658        ts.tv_sec += 5;
1659    }
1660    //Block on conditional variable
1661    mPendingRequest++;
1662    while (mPendingRequest >= kMaxInFlight) {
1663        if (!isValidTimeout) {
1664            ALOGV("%s: Blocking on conditional wait", __func__);
1665            pthread_cond_wait(&mRequestCond, &mMutex);
1666        }
1667        else {
1668            ALOGV("%s: Blocking on timed conditional wait", __func__);
1669            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1670            if (rc == ETIMEDOUT) {
1671                rc = -ENODEV;
1672                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1673                break;
1674            }
1675        }
1676        ALOGV("%s: Unblocked", __func__);
1677    }
1678    pthread_mutex_unlock(&mMutex);
1679
1680    return rc;
1681}
1682
1683/*===========================================================================
1684 * FUNCTION   : dump
1685 *
 * DESCRIPTION: Dump HAL internal state for debugging. Currently a no-op
 *              stub; no state is written.
 *
 * PARAMETERS :
 *   @fd      : file descriptor to write the dump to (unused)
 *
 * RETURN     : None
1692 *==========================================================================*/
1693void QCamera3HardwareInterface::dump(int /*fd*/)
1694{
1695    /*Enable lock when we implement this function*/
1696    /*
1697    pthread_mutex_lock(&mMutex);
1698
1699    pthread_mutex_unlock(&mMutex);
1700    */
1701    return;
1702}
1703
1704/*===========================================================================
1705 * FUNCTION   : flush
1706 *
 * DESCRIPTION: Flush the capture pipeline: stop all regular channels and
 *              the metadata channel, unblock any thread waiting in
 *              process_capture_request, return every pending buffer and
 *              request to the framework with error status, and reset the
 *              pending bookkeeping lists.
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 on success
1713 *==========================================================================*/
1714int QCamera3HardwareInterface::flush()
1715{
1716
1717    unsigned int frameNum = 0;
1718    camera3_notify_msg_t notify_msg;
1719    camera3_capture_result_t result;
1720    camera3_stream_buffer_t pStream_Buf;
1721
1722    ALOGV("%s: Unblocking Process Capture Request", __func__);
1723
1724    // Stop the Streams/Channels
1725    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1726        it != mStreamInfo.end(); it++) {
1727        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1728        channel->stop();
1729        (*it)->status = INVALID;
1730    }
1731
1732    if (mMetadataChannel) {
1733        /* If content of mStreamInfo is not 0, there is metadata stream */
1734        mMetadataChannel->stop();
1735    }
1736
1737    // Mutex Lock
1738    pthread_mutex_lock(&mMutex);
1739
1740    // Unblock process_capture_request
1741    mPendingRequest = 0;
1742    pthread_cond_signal(&mRequestCond);
1743
1744    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1745    frameNum = i->frame_number;
1746    ALOGV("%s: Latest frame num on  mPendingRequestsList = %d",
1747      __func__, frameNum);
1748
1749    // Go through the pending buffers and send buffer errors
1750    for (List<PendingBufferInfo>::iterator k =
1751         mPendingBuffersMap.mPendingBufferList.begin();
1752         k != mPendingBuffersMap.mPendingBufferList.end();  ) {
1753         ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1754          __func__, k->frame_number, k->buffer, k->stream,
1755          k->stream->format);
1756
1757        if (k->frame_number < frameNum) {
1758            // Send Error notify to frameworks for each buffer for which
1759            // metadata buffer is already sent
1760            ALOGV("%s: Sending ERROR BUFFER for frame %d, buffer %p",
1761              __func__, k->frame_number, k->buffer);
1762
1763            notify_msg.type = CAMERA3_MSG_ERROR;
1764            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
1765            notify_msg.message.error.error_stream = k->stream;
1766            notify_msg.message.error.frame_number = k->frame_number;
1767            mCallbackOps->notify(mCallbackOps, &notify_msg);
1768            ALOGV("%s: notify frame_number = %d", __func__,
1769                    i->frame_number);
1770
1771            pStream_Buf.acquire_fence = -1;
1772            pStream_Buf.release_fence = -1;
1773            pStream_Buf.buffer = k->buffer;
1774            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1775            pStream_Buf.stream = k->stream;
1776
1777            result.result = NULL;
1778            result.frame_number = k->frame_number;
1779            result.num_output_buffers = 1;
1780            result.output_buffers = &pStream_Buf ;
1781            mCallbackOps->process_capture_result(mCallbackOps, &result);
1782
1783            mPendingBuffersMap.num_buffers--;
1784            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1785        }
1786        else {
1787          k++;
1788        }
1789    }
1790
1791    ALOGV("%s:Sending ERROR REQUEST for all pending requests", __func__);
1792
1793    // Go through the pending requests info and send error request to framework
1794    for (i = mPendingRequestsList.begin(); i != mPendingRequestsList.end(); ) {
1795        int numBuffers = 0;
1796        ALOGV("%s:Sending ERROR REQUEST for frame %d",
1797              __func__, i->frame_number);
1798
1799        // Send shutter notify to frameworks
1800        notify_msg.type = CAMERA3_MSG_ERROR;
1801        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
1802        notify_msg.message.error.error_stream = NULL;
1803        notify_msg.message.error.frame_number = i->frame_number;
1804        mCallbackOps->notify(mCallbackOps, &notify_msg);
1805
1806        result.frame_number = i->frame_number;
1807        result.num_output_buffers = 0;
1808        result.output_buffers = NULL;
1809        numBuffers = 0;
1810
1811        for (List<PendingBufferInfo>::iterator k =
1812             mPendingBuffersMap.mPendingBufferList.begin();
1813             k != mPendingBuffersMap.mPendingBufferList.end(); ) {
1814          if (k->frame_number == i->frame_number) {
1815            ALOGV("%s: Sending Error for frame = %d, buffer = %p,"
1816                   " stream = %p, stream format = %d",__func__,
1817                   k->frame_number, k->buffer, k->stream, k->stream->format);
1818
1819            pStream_Buf.acquire_fence = -1;
1820            pStream_Buf.release_fence = -1;
1821            pStream_Buf.buffer = k->buffer;
1822            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1823            pStream_Buf.stream = k->stream;
1824
1825            result.num_output_buffers = 1;
1826            result.output_buffers = &pStream_Buf;
1827            result.result = NULL;
1828            result.frame_number = i->frame_number;
1829
1830            mCallbackOps->process_capture_result(mCallbackOps, &result);
1831            mPendingBuffersMap.num_buffers--;
1832            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1833            numBuffers++;
1834          }
1835          else {
1836            k++;
1837          }
1838        }
1839        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1840              __func__, mPendingBuffersMap.num_buffers);
1841
1842        i = mPendingRequestsList.erase(i);
1843    }
1844
1845    /* Reset pending buffer list and requests list */
1846    mPendingRequestsList.clear();
1847    /* Reset pending frame Drop list and requests list */
1848    mPendingFrameDropList.clear();
1849
1850    mPendingBuffersMap.num_buffers = 0;
1851    mPendingBuffersMap.mPendingBufferList.clear();
1852    ALOGV("%s: Cleared all the pending buffers ", __func__);
1853
1854    mFirstRequest = true;
1855    pthread_mutex_unlock(&mMutex);
1856    return 0;
1857}
1858
1859/*===========================================================================
1860 * FUNCTION   : captureResultCb
1861 *
1862 * DESCRIPTION: Callback handler for all capture result
1863 *              (streams, as well as metadata)
1864 *
1865 * PARAMETERS :
1866 *   @metadata : metadata information
1867 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1868 *               NULL if metadata.
1869 *
1870 * RETURN     : NONE
1871 *==========================================================================*/
1872void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1873                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1874{
1875    pthread_mutex_lock(&mMutex);
1876
1877    /* Assume flush() is called before any reprocessing. Send
1878     * notify and result immediately upon receipt of any callback*/
1879    if (mLoopBackResult) {
1880        /* Send notify */
1881        camera3_notify_msg_t notify_msg;
1882        notify_msg.type = CAMERA3_MSG_SHUTTER;
1883        notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
1884        notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
1885        mCallbackOps->notify(mCallbackOps, &notify_msg);
1886
1887        /* Send capture result */
1888        mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
1889        free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
1890        free(mLoopBackResult);
1891        mLoopBackResult = NULL;
1892    }
1893
1894    if (metadata_buf)
1895        handleMetadataWithLock(metadata_buf);
1896    else
1897        handleBufferWithLock(buffer, frame_number);
1898
1899    pthread_mutex_unlock(&mMutex);
1900    return;
1901}
1902
1903/*===========================================================================
1904 * FUNCTION   : translateFromHalMetadata
1905 *
 * DESCRIPTION: Translate a HAL metadata buffer into framework-format
 *              camera metadata (CameraMetadata entries).
1907 *
1908 * PARAMETERS :
1909 *   @metadata : metadata information from callback
1910 *
1911 * RETURN     : camera_metadata_t*
1912 *              metadata in a format specified by fwk
1913 *==========================================================================*/
1914camera_metadata_t*
1915QCamera3HardwareInterface::translateFromHalMetadata
1916                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1917                                 int32_t request_id, int32_t blob)
1918{
1919    CameraMetadata camMetadata;
1920    camera_metadata_t* resultMetadata;
1921
1922    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1923    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1924
1925    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1926    uint8_t next_entry;
1927    while (curr_entry != CAM_INTF_PARM_MAX) {
1928       switch (curr_entry) {
1929         case CAM_INTF_META_FRAME_NUMBER:{
1930             int64_t frame_number = *(uint32_t *) POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1931             camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
1932             break;
1933         }
1934         case CAM_INTF_META_FACE_DETECTION:{
1935             cam_face_detection_data_t *faceDetectionInfo =
1936                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1937             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1938             int32_t faceIds[MAX_ROI];
1939             uint8_t faceScores[MAX_ROI];
1940             int32_t faceRectangles[MAX_ROI * 4];
1941             int32_t faceLandmarks[MAX_ROI * 6];
1942             int j = 0, k = 0;
1943             for (int i = 0; i < numFaces; i++) {
1944                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1945                 faceScores[i] = faceDetectionInfo->faces[i].score;
1946                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1947                         faceRectangles+j, -1);
1948                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1949                 j+= 4;
1950                 k+= 6;
1951             }
1952
1953             if (numFaces <= 0) {
1954                memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
1955                memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
1956                memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
1957                memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
1958             }
1959
1960             camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1961             camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1962             camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1963               faceRectangles, numFaces*4);
1964             camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1965               faceLandmarks, numFaces*6);
1966
1967            break;
1968            }
1969         case CAM_INTF_META_COLOR_CORRECT_MODE:{
1970             uint8_t  *color_correct_mode =
1971                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1972             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1973             break;
1974          }
1975
1976         // 3A state is sent in urgent partial result (uses quirk)
1977         case CAM_INTF_META_AEC_PRECAPTURE_ID:
1978         case CAM_INTF_META_AEC_ROI:
1979         case CAM_INTF_META_AEC_STATE:
1980         case CAM_INTF_PARM_FOCUS_MODE:
1981         case CAM_INTF_META_AF_ROI:
1982         case CAM_INTF_META_AF_STATE:
1983         case CAM_INTF_META_AF_TRIGGER_ID:
1984         case CAM_INTF_PARM_WHITE_BALANCE:
1985         case CAM_INTF_META_AWB_REGIONS:
1986         case CAM_INTF_META_AWB_STATE:
1987         case CAM_INTF_META_MODE: {
1988           ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
1989           break;
1990         }
1991
1992          case CAM_INTF_META_EDGE_MODE: {
1993             cam_edge_application_t  *edgeApplication =
1994                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
1995             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
1996             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
1997             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
1998             break;
1999          }
2000          case CAM_INTF_META_FLASH_POWER: {
2001             uint8_t  *flashPower =
2002                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
2003             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
2004             break;
2005          }
2006          case CAM_INTF_META_FLASH_FIRING_TIME: {
2007             int64_t  *flashFiringTime =
2008                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
2009             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
2010             break;
2011          }
2012          case CAM_INTF_META_FLASH_STATE: {
2013             uint8_t  *flashState =
2014                (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
2015             camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
2016             break;
2017          }
2018          case CAM_INTF_META_FLASH_MODE:{
2019             uint8_t *flashMode = (uint8_t*)
2020                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
2021             camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
2022             break;
2023          }
2024          case CAM_INTF_META_HOTPIXEL_MODE: {
2025              uint8_t  *hotPixelMode =
2026                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
2027              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
2028              break;
2029          }
2030          case CAM_INTF_META_LENS_APERTURE:{
2031             float  *lensAperture =
2032                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
2033             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
2034             break;
2035          }
2036          case CAM_INTF_META_LENS_FILTERDENSITY: {
2037             float  *filterDensity =
2038                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
2039             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
2040             break;
2041          }
2042          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
2043             float  *focalLength =
2044                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2045             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
2046             break;
2047          }
2048          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
2049             float  *focusDistance =
2050                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
2051             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
2052             break;
2053          }
2054          case CAM_INTF_META_LENS_FOCUS_RANGE: {
2055             float  *focusRange =
2056                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
2057             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
2058             break;
2059          }
2060          case CAM_INTF_META_LENS_STATE: {
2061             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
2062             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
2063             break;
2064          }
2065          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
2066             uint8_t  *opticalStab =
2067                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
2068             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
2069             break;
2070          }
2071          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
2072             uint8_t  *noiseRedMode =
2073                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
2074             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
2075             break;
2076          }
2077          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
2078             uint8_t  *noiseRedStrength =
2079                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
2080             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
2081             break;
2082          }
2083          case CAM_INTF_META_SCALER_CROP_REGION: {
2084             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
2085             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
2086             int32_t scalerCropRegion[4];
2087             scalerCropRegion[0] = hScalerCropRegion->left;
2088             scalerCropRegion[1] = hScalerCropRegion->top;
2089             scalerCropRegion[2] = hScalerCropRegion->width;
2090             scalerCropRegion[3] = hScalerCropRegion->height;
2091             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
2092             break;
2093          }
2094          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
2095             int64_t  *sensorExpTime =
2096                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2097             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
2098             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
2099             break;
2100          }
2101          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
2102             int64_t  *sensorFameDuration =
2103                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
2104             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
2105             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
2106             break;
2107          }
2108          case CAM_INTF_META_SENSOR_SENSITIVITY:{
2109             int32_t  *sensorSensitivity =
2110                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
2111             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
2112             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
2113             break;
2114          }
2115          case CAM_INTF_META_SHADING_MODE: {
2116             uint8_t  *shadingMode =
2117                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
2118             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
2119             break;
2120          }
2121          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
2122             uint8_t  *faceDetectMode =
2123                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
2124             uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
2125                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
2126                                                        *faceDetectMode);
2127             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
2128             break;
2129          }
2130          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
2131             uint8_t  *histogramMode =
2132                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
2133             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
2134             break;
2135          }
2136          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
2137               uint8_t  *sharpnessMapMode =
2138                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
2139               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
2140                                  sharpnessMapMode, 1);
2141               break;
2142           }
2143          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
2144               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
2145               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
2146               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
2147                                  (int32_t*)sharpnessMap->sharpness,
2148                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
2149               break;
2150          }
2151          case CAM_INTF_META_LENS_SHADING_MAP: {
2152               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2153               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2154               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2155               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
2156               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2157                                  (float*)lensShadingMap->lens_shading,
2158                                  4*map_width*map_height);
2159               break;
2160          }
2161
2162          case CAM_INTF_META_TONEMAP_MODE: {
2163             uint8_t  *toneMapMode =
2164                (uint8_t *)POINTER_OF(CAM_INTF_META_TONEMAP_MODE, metadata);
2165             camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
2166             break;
2167          }
2168
2169          case CAM_INTF_META_TONEMAP_CURVES:{
2170             //Populate CAM_INTF_META_TONEMAP_CURVES
2171             /* ch0 = G, ch 1 = B, ch 2 = R*/
2172             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2173             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
2174             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
2175                                (float*)tonemap->curves[0].tonemap_points,
2176                                tonemap->tonemap_points_cnt * 2);
2177
2178             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
2179                                (float*)tonemap->curves[1].tonemap_points,
2180                                tonemap->tonemap_points_cnt * 2);
2181
2182             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
2183                                (float*)tonemap->curves[2].tonemap_points,
2184                                tonemap->tonemap_points_cnt * 2);
2185             break;
2186          }
2187          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
2188             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
2189             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
2190             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
2191             break;
2192          }
2193          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
2194              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
2195              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
2196              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
2197                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
2198              break;
2199          }
2200          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
2201             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
2202             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
2203             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
2204                       predColorCorrectionGains->gains, 4);
2205             break;
2206          }
2207          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
2208             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
2209                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2210             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2211                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2212             break;
2213
2214          }
2215
2216          case CAM_INTF_META_OTP_WB_GRGB:{
2217             float *otpWbGrGb = (float*) POINTER_OF(CAM_INTF_META_OTP_WB_GRGB, metadata);
2218             camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
2219             break;
2220          }
2221
2222          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
2223             uint8_t *blackLevelLock = (uint8_t*)
2224               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2225             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2226             break;
2227          }
2228          case CAM_INTF_META_SCENE_FLICKER:{
2229             uint8_t *sceneFlicker = (uint8_t*)
2230             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2231             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2232             break;
2233          }
2234          case CAM_INTF_PARM_LED_MODE:
2235             break;
2236          case CAM_INTF_PARM_EFFECT: {
2237             uint8_t *effectMode = (uint8_t*)
2238                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2239             uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
2240                                                    sizeof(EFFECT_MODES_MAP),
2241                                                    *effectMode);
2242             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2243             break;
2244          }
2245          case CAM_INTF_META_TEST_PATTERN_DATA: {
2246             cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
2247                 POINTER_OF(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
2248             int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
2249                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
2250                     testPatternData->mode);
2251             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
2252                     &fwk_testPatternMode, 1);
2253             break;
2254          }
2255          case CAM_INTF_META_JPEG_GPS_COORDINATES: {
2256              double *gps_coords = (double *)POINTER_OF(
2257                      CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
2258              camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
2259              break;
2260          }
2261          case CAM_INTF_META_JPEG_GPS_PROC_METHODS: {
2262              char *gps_methods = (char *)POINTER_OF(
2263                      CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
2264              String8 str(gps_methods);
2265              camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
2266              break;
2267          }
2268          case CAM_INTF_META_JPEG_GPS_TIMESTAMP: {
2269              int64_t *gps_timestamp = (int64_t *)POINTER_OF(
2270                      CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
2271              camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
2272              break;
2273          }
2274          case CAM_INTF_META_JPEG_ORIENTATION: {
2275              int32_t *jpeg_orientation = (int32_t *)POINTER_OF(
2276                      CAM_INTF_META_JPEG_ORIENTATION, metadata);
2277              camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
2278              break;
2279          }
2280          case CAM_INTF_META_JPEG_QUALITY: {
2281              uint8_t *jpeg_quality = (uint8_t *)POINTER_OF(
2282                      CAM_INTF_META_JPEG_QUALITY, metadata);
2283              camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
2284              break;
2285          }
2286          case CAM_INTF_META_JPEG_THUMB_QUALITY: {
2287              uint8_t *thumb_quality = (uint8_t *)POINTER_OF(
2288                      CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
2289              camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
2290              break;
2291          }
2292
2293          case CAM_INTF_META_JPEG_THUMB_SIZE: {
2294              cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF(
2295                      CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
2296              camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, (int32_t *)thumb_size, 2);
2297              break;
2298          }
2299
2300             break;
2301          case CAM_INTF_META_PRIVATE_DATA: {
2302             uint8_t *privateData = (uint8_t *)
2303                 POINTER_OF(CAM_INTF_META_PRIVATE_DATA, metadata);
2304             camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
2305                 privateData, MAX_METADATA_PAYLOAD_SIZE);
2306             break;
2307          }
2308          default:
2309             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2310                   __func__, curr_entry);
2311             break;
2312       }
2313       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2314       curr_entry = next_entry;
2315    }
2316
2317    uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
2318    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
2319
2320    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
2321    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
2322
2323    int32_t hotPixelMap[2];
2324    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
2325
2326    resultMetadata = camMetadata.release();
2327    return resultMetadata;
2328}
2329
2330/*===========================================================================
2331 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
2332 *
 * DESCRIPTION: Translates urgent 3A metadata (AE/AF/AWB state, modes and
 *              regions) from the HAL callback into framework result metadata
2334 *
2335 * PARAMETERS :
2336 *   @metadata : metadata information from callback
2337 *
2338 * RETURN     : camera_metadata_t*
2339 *              metadata in a format specified by fwk
2340 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata) {

    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Mark this buffer as a partial result so the framework knows the full
    // (non-urgent) metadata for the frame will arrive in a later callback.
    uint8_t partial_result_tag = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
    camMetadata.update(ANDROID_QUIRKS_PARTIAL_RESULT, &partial_result_tag, 1);

    // Walk only the entries actually present in this metadata batch; the
    // GET_FIRST/NEXT_PARAM_ID macros chain through the populated entries.
    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
    uint8_t next_entry;
    while (curr_entry != CAM_INTF_PARM_MAX) {
      switch (curr_entry) {
        // AE precapture trigger id echoed back to the framework.
        case CAM_INTF_META_AEC_PRECAPTURE_ID: {
            int32_t  *ae_precapture_id =
              (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                                          ae_precapture_id, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID", __func__);
          break;
        }
        // AE region: HAL rect+weight converted to fwk [xmin,ymin,xmax,ymax,weight].
        case CAM_INTF_META_AEC_ROI: {
            cam_area_t  *hAeRegions =
                (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
            int32_t aeRegions[5];
            convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
            camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_REGIONS", __func__);
            break;
        }
        case CAM_INTF_META_AEC_STATE:{
            uint8_t *ae_state =
                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
            break;
        }
        // Focus mode: HAL enum mapped to the framework enum via the lookup table.
        case CAM_INTF_PARM_FOCUS_MODE:{
            uint8_t  *focusMode =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
            uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
            break;
        }
        case CAM_INTF_META_AF_ROI:{
            /*af regions*/
            cam_area_t  *hAfRegions =
                (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
            int32_t afRegions[5];
            convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
            camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_REGIONS", __func__);
            break;
        }
        case CAM_INTF_META_AF_STATE: {
            uint8_t  *afState =
               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
            break;
        }
        case CAM_INTF_META_AF_TRIGGER_ID: {
            int32_t  *afTriggerId =
                 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID", __func__);
            break;
        }
        // White balance mode: HAL enum mapped to the framework AWB mode enum.
        case CAM_INTF_PARM_WHITE_BALANCE: {
           uint8_t  *whiteBalance =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
             uint8_t fwkWhiteBalanceMode =
                    (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
                    sizeof(WHITE_BALANCE_MODES_MAP)/
                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
                 &fwkWhiteBalanceMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
             break;
        }
        case CAM_INTF_META_AWB_REGIONS: {
           /*awb regions*/
           cam_area_t  *hAwbRegions =
               (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
           int32_t awbRegions[5];
           convertToRegions(hAwbRegions->rect, awbRegions,hAwbRegions->weight);
           camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_REGIONS", __func__);
           break;
        }
        case CAM_INTF_META_AWB_STATE: {
           uint8_t  *whiteBalanceState =
              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
           break;
        }
        case CAM_INTF_META_MODE: {
            uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
            camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_MODE", __func__);
            break;
        }
        // Anything else is non-urgent metadata; it is handled by the
        // full-result translation path, so just skip it here.
        default:
            ALOGV("%s: Normal Metadata %d, do not process",
              __func__, curr_entry);
       }
       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
       curr_entry = next_entry;
    }
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
2457
2458/*===========================================================================
2459 * FUNCTION   : dumpMetadataToFile
2460 *
2461 * DESCRIPTION: Dumps tuning metadata to file system
2462 *
2463 * PARAMETERS :
2464 *   @meta           : tuning metadata
2465 *   @dumpFrameCount : current dump frame count
2466 *   @enabled        : Enable mask
2467 *
2468 *==========================================================================*/
2469void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
2470                                                   uint32_t &dumpFrameCount,
2471                                                   int32_t enabled,
2472                                                   const char *type,
2473                                                   uint32_t frameNumber)
2474{
2475    uint32_t frm_num = 0;
2476
2477    //Some sanity checks
2478    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
2479        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
2480              __func__,
2481              meta.tuning_sensor_data_size,
2482              TUNING_SENSOR_DATA_MAX);
2483        return;
2484    }
2485
2486    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
2487        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
2488              __func__,
2489              meta.tuning_vfe_data_size,
2490              TUNING_VFE_DATA_MAX);
2491        return;
2492    }
2493
2494    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
2495        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
2496              __func__,
2497              meta.tuning_cpp_data_size,
2498              TUNING_CPP_DATA_MAX);
2499        return;
2500    }
2501
2502    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
2503        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
2504              __func__,
2505              meta.tuning_cac_data_size,
2506              TUNING_CAC_DATA_MAX);
2507        return;
2508    }
2509    //
2510
2511    if(enabled){
2512        frm_num = ((enabled & 0xffff0000) >> 16);
2513        if(frm_num == 0) {
2514            frm_num = 10; //default 10 frames
2515        }
2516        if(frm_num > 256) {
2517            frm_num = 256; //256 buffers cycle around
2518        }
2519        if((frm_num == 256) && (dumpFrameCount >= frm_num)) {
2520            // reset frame count if cycling
2521            dumpFrameCount = 0;
2522        }
2523        ALOGV("DumpFrmCnt = %d, frm_num = %d",dumpFrameCount, frm_num);
2524        if (dumpFrameCount < frm_num) {
2525            char timeBuf[FILENAME_MAX];
2526            char buf[FILENAME_MAX];
2527            memset(buf, 0, sizeof(buf));
2528            memset(timeBuf, 0, sizeof(timeBuf));
2529            time_t current_time;
2530            struct tm * timeinfo;
2531            time (&current_time);
2532            timeinfo = localtime (&current_time);
2533            strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
2534            String8 filePath(timeBuf);
2535            snprintf(buf,
2536                     sizeof(buf),
2537                     "%d_HAL_META_%s_%d.bin",
2538                     dumpFrameCount,
2539                     type,
2540                     frameNumber);
2541            filePath.append(buf);
2542            int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2543            if (file_fd > 0) {
2544                int written_len = 0;
2545                meta.tuning_data_version = TUNING_DATA_VERSION;
2546                void *data = (void *)((uint8_t *)&meta.tuning_data_version);
2547                written_len += write(file_fd, data, sizeof(uint32_t));
2548                data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
2549                ALOGV("tuning_sensor_data_size %d",(int)(*(int *)data));
2550                written_len += write(file_fd, data, sizeof(uint32_t));
2551                data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
2552                ALOGV("tuning_vfe_data_size %d",(int)(*(int *)data));
2553                written_len += write(file_fd, data, sizeof(uint32_t));
2554                data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
2555                ALOGV("tuning_cpp_data_size %d",(int)(*(int *)data));
2556                written_len += write(file_fd, data, sizeof(uint32_t));
2557                data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
2558                ALOGV("tuning_cac_data_size %d",(int)(*(int *)data));
2559                written_len += write(file_fd, data, sizeof(uint32_t));
2560                int total_size = meta.tuning_sensor_data_size;
2561                data = (void *)((uint8_t *)&meta.data);
2562                written_len += write(file_fd, data, total_size);
2563                total_size = meta.tuning_vfe_data_size;
2564                data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
2565                written_len += write(file_fd, data, total_size);
2566                total_size = meta.tuning_cpp_data_size;
2567                data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
2568                written_len += write(file_fd, data, total_size);
2569                total_size = meta.tuning_cac_data_size;
2570                data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
2571                written_len += write(file_fd, data, total_size);
2572                close(file_fd);
2573            }else {
2574                ALOGE("%s: fail t open file for image dumping", __func__);
2575            }
2576            dumpFrameCount++;
2577        }
2578    }
2579}
2580
2581/*===========================================================================
2582 * FUNCTION   : cleanAndSortStreamInfo
2583 *
2584 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
2585 *              and sort them such that raw stream is at the end of the list
2586 *              This is a workaround for camera daemon constraint.
2587 *
2588 * PARAMETERS : None
2589 *
2590 *==========================================================================*/
2591void QCamera3HardwareInterface::cleanAndSortStreamInfo()
2592{
2593    List<stream_info_t *> newStreamInfo;
2594
2595    /*clean up invalid streams*/
2596    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2597            it != mStreamInfo.end();) {
2598        if(((*it)->status) == INVALID){
2599            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
2600            delete channel;
2601            free(*it);
2602            it = mStreamInfo.erase(it);
2603        } else {
2604            it++;
2605        }
2606    }
2607
2608    // Move preview/video/callback/snapshot streams into newList
2609    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2610            it != mStreamInfo.end();) {
2611        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
2612                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
2613            newStreamInfo.push_back(*it);
2614            it = mStreamInfo.erase(it);
2615        } else
2616            it++;
2617    }
2618    // Move raw streams into newList
2619    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2620            it != mStreamInfo.end();) {
2621        newStreamInfo.push_back(*it);
2622        it = mStreamInfo.erase(it);
2623    }
2624
2625    mStreamInfo = newStreamInfo;
2626}
2627
2628/*===========================================================================
2629 * FUNCTION   : convertToRegions
2630 *
2631 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2632 *
2633 * PARAMETERS :
2634 *   @rect   : cam_rect_t struct to convert
2635 *   @region : int32_t destination array
2636 *   @weight : if we are converting from cam_area_t, weight is valid
2637 *             else weight = -1
2638 *
2639 *==========================================================================*/
2640void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2641    region[0] = rect.left;
2642    region[1] = rect.top;
2643    region[2] = rect.left + rect.width;
2644    region[3] = rect.top + rect.height;
2645    if (weight > -1) {
2646        region[4] = weight;
2647    }
2648}
2649
2650/*===========================================================================
2651 * FUNCTION   : convertFromRegions
2652 *
2653 * DESCRIPTION: helper method to convert from array to cam_rect_t
2654 *
2655 * PARAMETERS :
 *   @roi      : destination cam_area_t struct
 *   @settings : request settings metadata to read the region tag from
 *   @tag      : metadata tag whose value is [xmin, ymin, xmax, ymax, weight]
2660 *
2661 *==========================================================================*/
2662void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2663                                                   const camera_metadata_t *settings,
2664                                                   uint32_t tag){
2665    CameraMetadata frame_settings;
2666    frame_settings = settings;
2667    int32_t x_min = frame_settings.find(tag).data.i32[0];
2668    int32_t y_min = frame_settings.find(tag).data.i32[1];
2669    int32_t x_max = frame_settings.find(tag).data.i32[2];
2670    int32_t y_max = frame_settings.find(tag).data.i32[3];
2671    roi->weight = frame_settings.find(tag).data.i32[4];
2672    roi->rect.left = x_min;
2673    roi->rect.top = y_min;
2674    roi->rect.width = x_max - x_min;
2675    roi->rect.height = y_max - y_min;
2676}
2677
2678/*===========================================================================
2679 * FUNCTION   : resetIfNeededROI
2680 *
2681 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2682 *              crop region
2683 *
2684 * PARAMETERS :
2685 *   @roi       : cam_area_t struct to resize
2686 *   @scalerCropRegion : cam_crop_region_t region to compare against
2687 *
2688 *
2689 *==========================================================================*/
2690bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2691                                                 const cam_crop_region_t* scalerCropRegion)
2692{
2693    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2694    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2695    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2696    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2697    if ((roi_x_max < scalerCropRegion->left) ||
2698        (roi_y_max < scalerCropRegion->top)  ||
2699        (roi->rect.left > crop_x_max) ||
2700        (roi->rect.top > crop_y_max)){
2701        return false;
2702    }
2703    if (roi->rect.left < scalerCropRegion->left) {
2704        roi->rect.left = scalerCropRegion->left;
2705    }
2706    if (roi->rect.top < scalerCropRegion->top) {
2707        roi->rect.top = scalerCropRegion->top;
2708    }
2709    if (roi_x_max > crop_x_max) {
2710        roi_x_max = crop_x_max;
2711    }
2712    if (roi_y_max > crop_y_max) {
2713        roi_y_max = crop_y_max;
2714    }
2715    roi->rect.width = roi_x_max - roi->rect.left;
2716    roi->rect.height = roi_y_max - roi->rect.top;
2717    return true;
2718}
2719
2720/*===========================================================================
2721 * FUNCTION   : convertLandmarks
2722 *
2723 * DESCRIPTION: helper method to extract the landmarks from face detection info
2724 *
2725 * PARAMETERS :
 *   @face   : cam_face_detection_info_t struct to read landmarks from
2727 *   @landmarks : int32_t destination array
2728 *
2729 *
2730 *==========================================================================*/
2731void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2732{
2733    landmarks[0] = face.left_eye_center.x;
2734    landmarks[1] = face.left_eye_center.y;
2735    landmarks[2] = face.right_eye_center.x;
2736    landmarks[3] = face.right_eye_center.y;
2737    landmarks[4] = face.mouth_center.x;
2738    landmarks[5] = face.mouth_center.y;
2739}
2740
2741#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2742/*===========================================================================
2743 * FUNCTION   : initCapabilities
2744 *
2745 * DESCRIPTION: initialize camera capabilities in static data struct
2746 *
2747 * PARAMETERS :
2748 *   @cameraId  : camera Id
2749 *
2750 * RETURN     : int32_t type of status
2751 *              NO_ERROR  -- success
2752 *              none-zero failure code
2753 *==========================================================================*/
2754int QCamera3HardwareInterface::initCapabilities(int cameraId)
2755{
2756    int rc = 0;
2757    mm_camera_vtbl_t *cameraHandle = NULL;
2758    QCamera3HeapMemory *capabilityHeap = NULL;
2759
2760    cameraHandle = camera_open(cameraId);
2761    if (!cameraHandle) {
2762        ALOGE("%s: camera_open failed", __func__);
2763        rc = -1;
2764        goto open_failed;
2765    }
2766
2767    capabilityHeap = new QCamera3HeapMemory();
2768    if (capabilityHeap == NULL) {
2769        ALOGE("%s: creation of capabilityHeap failed", __func__);
2770        goto heap_creation_failed;
2771    }
2772    /* Allocate memory for capability buffer */
2773    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2774    if(rc != OK) {
2775        ALOGE("%s: No memory for cappability", __func__);
2776        goto allocate_failed;
2777    }
2778
2779    /* Map memory for capability buffer */
2780    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2781    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2782                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2783                                capabilityHeap->getFd(0),
2784                                sizeof(cam_capability_t));
2785    if(rc < 0) {
2786        ALOGE("%s: failed to map capability buffer", __func__);
2787        goto map_failed;
2788    }
2789
2790    /* Query Capability */
2791    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2792    if(rc < 0) {
2793        ALOGE("%s: failed to query capability",__func__);
2794        goto query_failed;
2795    }
2796    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2797    if (!gCamCapability[cameraId]) {
2798        ALOGE("%s: out of memory", __func__);
2799        goto query_failed;
2800    }
2801    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2802                                        sizeof(cam_capability_t));
2803    rc = 0;
2804
2805query_failed:
2806    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2807                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2808map_failed:
2809    capabilityHeap->deallocate();
2810allocate_failed:
2811    delete capabilityHeap;
2812heap_creation_failed:
2813    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2814    cameraHandle = NULL;
2815open_failed:
2816    return rc;
2817}
2818
2819/*===========================================================================
2820 * FUNCTION   : initParameters
2821 *
2822 * DESCRIPTION: initialize camera parameters
2823 *
2824 * PARAMETERS :
2825 *
2826 * RETURN     : int32_t type of status
2827 *              NO_ERROR  -- success
2828 *              none-zero failure code
2829 *==========================================================================*/
2830int QCamera3HardwareInterface::initParameters()
2831{
2832    int rc = 0;
2833
2834    //Allocate Set Param Buffer
2835    mParamHeap = new QCamera3HeapMemory();
2836    rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
2837    if(rc != OK) {
2838        rc = NO_MEMORY;
2839        ALOGE("Failed to allocate SETPARM Heap memory");
2840        delete mParamHeap;
2841        mParamHeap = NULL;
2842        return rc;
2843    }
2844
2845    //Map memory for parameters buffer
2846    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
2847            CAM_MAPPING_BUF_TYPE_PARM_BUF,
2848            mParamHeap->getFd(0),
2849            sizeof(metadata_buffer_t));
2850    if(rc < 0) {
2851        ALOGE("%s:failed to map SETPARM buffer",__func__);
2852        rc = FAILED_TRANSACTION;
2853        mParamHeap->deallocate();
2854        delete mParamHeap;
2855        mParamHeap = NULL;
2856        return rc;
2857    }
2858
2859    mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
2860    return rc;
2861}
2862
2863/*===========================================================================
2864 * FUNCTION   : deinitParameters
2865 *
2866 * DESCRIPTION: de-initialize camera parameters
2867 *
2868 * PARAMETERS :
2869 *
2870 * RETURN     : NONE
2871 *==========================================================================*/
2872void QCamera3HardwareInterface::deinitParameters()
2873{
2874    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
2875            CAM_MAPPING_BUF_TYPE_PARM_BUF);
2876
2877    mParamHeap->deallocate();
2878    delete mParamHeap;
2879    mParamHeap = NULL;
2880
2881    mParameters = NULL;
2882}
2883
2884/*===========================================================================
2885 * FUNCTION   : calcMaxJpegSize
2886 *
2887 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
2888 *
2889 * PARAMETERS :
2890 *
2891 * RETURN     : max_jpeg_size
2892 *==========================================================================*/
2893int QCamera3HardwareInterface::calcMaxJpegSize()
2894{
2895    int32_t max_jpeg_size = 0;
2896    int temp_width, temp_height;
2897    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
2898        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
2899        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
2900        if (temp_width * temp_height > max_jpeg_size ) {
2901            max_jpeg_size = temp_width * temp_height;
2902        }
2903    }
2904    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2905    return max_jpeg_size;
2906}
2907
2908/*===========================================================================
2909 * FUNCTION   : initStaticMetadata
2910 *
2911 * DESCRIPTION: initialize the static metadata
2912 *
2913 * PARAMETERS :
2914 *   @cameraId  : camera Id
2915 *
2916 * RETURN     : int32_t type of status
2917 *              0  -- success
2918 *              non-zero failure code
2919 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    int rc = 0;
    // Accumulates every static tag; released into gStaticMetadata[cameraId]
    // at the bottom of this function.
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    // Lens characteristics copied straight from the capability table.
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      &gCamCapability[cameraId]->focal_length,
                      1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    // NOTE(review): assumes optical_stab_modes entries are byte-sized or
    // byte-compatible with the uint8_t tag type -- confirm against cam_types.h.
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                      gCamCapability[cameraId]->optical_stab_modes_count);

    staticInfo.update(ANDROID_LENS_POSITION,
                      gCamCapability[cameraId]->lens_position,
                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));

    // Lens shading map dimensions packed as {width, height}.
    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                       gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    // Sensor static characteristics.
    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);

    camera_metadata_rational baseGainFactor = {
            gCamCapability[cameraId]->base_gain_factor.numerator,
            gCamCapability[cameraId]->base_gain_factor.denominator};
    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
                      &baseGainFactor, 1);

    // NOTE(review): taking the first byte of an enum via (uint8_t*) is
    // little-endian dependent -- works on ARM; confirm if ported.
    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                  gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, 2);

    // Active array rectangle packed as {left, top, width, height}.
    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
                                                gCamCapability[cameraId]->active_array_size.top,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    // Statistics capabilities (face detect, histogram, sharpness map).
    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      (int32_t*)&maxFaces, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                    gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);

    // Output formats advertised to the framework; reused below when building
    // the stream-configuration and min-frame-duration tables.
    int32_t scalar_formats[] = {
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    // Flatten picture sizes to {w,h,...} pairs; reused for JPEG sizes below.
    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    int32_t available_raw_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->raw_dim,
              gCamCapability[cameraId]->supported_raw_dim_cnt,
              available_raw_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
                available_raw_sizes,
                gCamCapability[cameraId]->supported_raw_dim_cnt * 2);

    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    // Only OFF is advertised until video stabilization is wired up.
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /** Quirk for urgent 3A state until final interface is worked out */
    uint8_t usePartialResultQuirk = 1;
    staticInfo.update(ANDROID_QUIRKS_USE_PARTIAL_RESULT,
                      &usePartialResultQuirk, 1);

    /*HAL 1 and HAL 3 common*/
    // Hard-coded 4x digital zoom limit.
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    // Max metering regions: 1 AE, 0 AWB, 1 AF.
    int32_t max3aRegions[] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            max3aRegions, 3);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                           gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    // JPEG sizes mirror the processed sizes table built above.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    // available_thumbnail_sizes is a file-scope table defined elsewhere in
    // this file.
    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    /*android.scaler.availableStreamConfigurations*/
    // Each entry is a 4-tuple: {format, width, height, direction}. RAW
    // formats enumerate raw_dim; everything else enumerates picture sizes.
    int32_t max_stream_configs_size =
            gCamCapability[cameraId]->picture_sizes_tbl_cnt *
            sizeof(scalar_formats)/sizeof(int32_t) * 4;
    int32_t available_stream_configs[max_stream_configs_size];
    int idx = 0;
    for (int j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            for (int i = 0;
                i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
                available_stream_configs[idx] = scalar_formats[j];
                available_stream_configs[idx+1] =
                    gCamCapability[cameraId]->raw_dim[i].width;
                available_stream_configs[idx+2] =
                    gCamCapability[cameraId]->raw_dim[i].height;
                available_stream_configs[idx+3] =
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
                idx+=4;
            }
            break;
        default:
            for (int i = 0;
                i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
                available_stream_configs[idx] = scalar_formats[j];
                available_stream_configs[idx+1] =
                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
                available_stream_configs[idx+2] =
                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
                available_stream_configs[idx+3] =
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
                idx+=4;
            }


            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs, idx);

    /* android.scaler.availableMinFrameDurations */
    // Same 4-tuple layout as above, with duration in the 4th slot instead
    // of the stream direction.
    int64_t available_min_durations[max_stream_configs_size];
    idx = 0;
    for (int j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            for (int i = 0;
                i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
                available_min_durations[idx] = scalar_formats[j];
                available_min_durations[idx+1] =
                    gCamCapability[cameraId]->raw_dim[i].width;
                available_min_durations[idx+2] =
                    gCamCapability[cameraId]->raw_dim[i].height;
                available_min_durations[idx+3] =
                    gCamCapability[cameraId]->raw_min_duration[i];
                idx+=4;
            }
            break;
        default:
            for (int i = 0;
                i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
                available_min_durations[idx] = scalar_formats[j];
                available_min_durations[idx+1] =
                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
                available_min_durations[idx+2] =
                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
                available_min_durations[idx+3] =
                    gCamCapability[cameraId]->picture_min_duration[i];
                idx+=4;
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      &available_min_durations[0], idx);

    // Worst-case JPEG size: largest picture area * 1.5 plus the blob
    // header. (Same computation as calcMaxJpegSize(), duplicated here
    // because this function is keyed by cameraId, not mCameraId.)
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // Translate HAL effect enums to framework values via EFFECT_MODES_MAP;
    // entries lookupFwkName can't resolve are silently skipped.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int32_t val = lookupFwkName(EFFECT_MODES_MAP,
                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // Scene modes: also record the HAL-table index of each supported mode
    // so the overrides list below can be built in matching order.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int32_t val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // 3 override entries (AE/AWB/AF) per supported scene mode.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                 gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int32_t val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                    gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    // Flash firing levels copied verbatim (no framework translation).
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);

    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // NOTE(review): fixed 5-slot array; assumes supported_ae_modes_cnt <= 2
    // when flash is available (3 flash AE modes are appended) -- confirm
    // against the capability table, or this overflows.
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // {raw, processed (non-stalling), processed (stalling)} stream counts.
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    // No notification LEDs: tag published with zero entries.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);

    // Tag is only published when the calibration value maps to a framework
    // enum.
    uint8_t focus_dist_calibrated;
    int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
            sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (val != NAME_NOT_FOUND) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }

    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
            i++) {
        int32_t val = lookupFwkName(TEST_PATTERN_MAP,
                                    sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
                                    gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_testpattern_modes[size] = val;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    // Pipeline depth mirrors the HAL's in-flight request cap.
    uint8_t max_pipeline_depth = kMaxInFlight;
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    int32_t partial_result_count = 2;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    uint8_t available_capabilities[] =
        {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_GCAM};
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                      available_capabilities,
                      3);

    // Reprocessing is not supported: no input streams, empty format map.
    int32_t max_input_streams = 0;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);

    int32_t io_format_map[] = {};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, 0);

    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

    float optical_axis_angle[2];
    optical_axis_angle[0] = 0; //need to verify
    optical_axis_angle[1] = 0; //need to verify
    staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
                      optical_axis_angle,
                      2);

    // Fixed mode lists for post-processing stages.
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
                      available_hot_pixel_modes,
                      1);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
                      available_edge_modes,
                      2);

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
                      available_noise_red_modes,
                      2);

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
                      available_tonemap_modes,
                      3);

    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
                      available_hot_pixel_map_modes,
                      1);

    // Keys accepted in capture requests.
    int32_t available_request_keys[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_REGIONS, ANDROID_CONTROL_AF_TRIGGER,
       ANDROID_CONTROL_AWB_LOCK, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
       ANDROID_CONTROL_CAPTURE_INTENT, ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
       ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK };
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
                      available_request_keys,
                      sizeof(available_request_keys)/sizeof(int32_t));

    // Keys this HAL reports in capture results.
    int32_t available_result_keys[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AF_REGIONS,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_QUIRKS_PARTIAL_RESULT, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_FORWARD_MATRIX,
       ANDROID_SENSOR_COLOR_TRANSFORM, ANDROID_SENSOR_CALIBRATION_TRANSFORM,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
       ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES};
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
                      available_result_keys,
                      sizeof(available_result_keys)/sizeof(int32_t));

    // Static characteristics keys published by this function.
    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
       ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
       ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
                      available_characteristics_keys,
                      sizeof(available_characteristics_keys)/sizeof(int32_t));

    // Transfer ownership of the assembled metadata to the per-camera cache.
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}
3549
3550/*===========================================================================
3551 * FUNCTION   : makeTable
3552 *
3553 * DESCRIPTION: make a table of sizes
3554 *
3555 * PARAMETERS :
3556 *
3557 *
3558 *==========================================================================*/
3559void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
3560                                          int32_t* sizeTable)
3561{
3562    int j = 0;
3563    for (int i = 0; i < size; i++) {
3564        sizeTable[j] = dimTable[i].width;
3565        sizeTable[j+1] = dimTable[i].height;
3566        j+=2;
3567    }
3568}
3569
3570/*===========================================================================
3571 * FUNCTION   : makeFPSTable
3572 *
3573 * DESCRIPTION: make a table of fps ranges
3574 *
3575 * PARAMETERS :
3576 *
3577 *==========================================================================*/
3578void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
3579                                          int32_t* fpsRangesTable)
3580{
3581    int j = 0;
3582    for (int i = 0; i < size; i++) {
3583        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
3584        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
3585        j+=2;
3586    }
3587}
3588
3589/*===========================================================================
3590 * FUNCTION   : makeOverridesList
3591 *
3592 * DESCRIPTION: make a list of scene mode overrides
3593 *
3594 * PARAMETERS :
3595 *
3596 *
3597 *==========================================================================*/
3598void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
3599                                                  uint8_t size, uint8_t* overridesList,
3600                                                  uint8_t* supported_indexes,
3601                                                  int camera_id)
3602{
3603    /*daemon will give a list of overrides for all scene modes.
3604      However we should send the fwk only the overrides for the scene modes
3605      supported by the framework*/
3606    int j = 0, index = 0, supt = 0;
3607    uint8_t focus_override;
3608    for (int i = 0; i < size; i++) {
3609        supt = 0;
3610        index = supported_indexes[i];
3611        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
3612        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
3613                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3614                                                    overridesTable[index].awb_mode);
3615        focus_override = (uint8_t)overridesTable[index].af_mode;
3616        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
3617           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
3618              supt = 1;
3619              break;
3620           }
3621        }
3622        if (supt) {
3623           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
3624                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3625                                              focus_override);
3626        } else {
3627           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
3628        }
3629        j+=3;
3630    }
3631}
3632
3633/*===========================================================================
3634 * FUNCTION   : getPreviewHalPixelFormat
3635 *
3636 * DESCRIPTION: convert the format to type recognized by framework
3637 *
3638 * PARAMETERS : format : the format from backend
3639 *
3640 ** RETURN    : format recognized by framework
3641 *
3642 *==========================================================================*/
3643int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
3644{
3645    int32_t halPixelFormat;
3646
3647    switch (format) {
3648    case CAM_FORMAT_YUV_420_NV12:
3649        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
3650        break;
3651    case CAM_FORMAT_YUV_420_NV21:
3652        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3653        break;
3654    case CAM_FORMAT_YUV_420_NV21_ADRENO:
3655        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
3656        break;
3657    case CAM_FORMAT_YUV_420_YV12:
3658        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
3659        break;
3660    case CAM_FORMAT_YUV_422_NV16:
3661    case CAM_FORMAT_YUV_422_NV61:
3662    default:
3663        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3664        break;
3665    }
3666    return halPixelFormat;
3667}
3668
3669/*===========================================================================
3670 * FUNCTION   : getSensorSensitivity
3671 *
3672 * DESCRIPTION: convert iso_mode to an integer value
3673 *
3674 * PARAMETERS : iso_mode : the iso_mode supported by sensor
3675 *
3676 ** RETURN    : sensitivity supported by sensor
3677 *
3678 *==========================================================================*/
3679int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
3680{
3681    int32_t sensitivity;
3682
3683    switch (iso_mode) {
3684    case CAM_ISO_MODE_100:
3685        sensitivity = 100;
3686        break;
3687    case CAM_ISO_MODE_200:
3688        sensitivity = 200;
3689        break;
3690    case CAM_ISO_MODE_400:
3691        sensitivity = 400;
3692        break;
3693    case CAM_ISO_MODE_800:
3694        sensitivity = 800;
3695        break;
3696    case CAM_ISO_MODE_1600:
3697        sensitivity = 1600;
3698        break;
3699    default:
3700        sensitivity = -1;
3701        break;
3702    }
3703    return sensitivity;
3704}
3705
3706/*===========================================================================
3707 * FUNCTION   : AddSetMetaEntryToBatch
3708 *
3709 * DESCRIPTION: add set parameter entry into batch
3710 *
3711 * PARAMETERS :
3712 *   @p_table     : ptr to parameter buffer
3713 *   @paramType   : parameter type
3714 *   @paramLength : length of parameter value
3715 *   @paramValue  : ptr to parameter value
3716 *
3717 * RETURN     : int32_t type of status
3718 *              NO_ERROR  -- success
3719 *              none-zero failure code
3720 *==========================================================================*/
3721int32_t QCamera3HardwareInterface::AddSetMetaEntryToBatch(metadata_buffer_t *p_table,
3722                                                          unsigned int paramType,
3723                                                          uint32_t paramLength,
3724                                                          void *paramValue)
3725{
3726    int position = paramType;
3727    int current, next;
3728
3729    /*************************************************************************
3730    *                 Code to take care of linking next flags                *
3731    *************************************************************************/
3732    current = GET_FIRST_PARAM_ID(p_table);
3733    if (position == current){
3734        //DO NOTHING
3735    } else if (position < current){
3736        SET_NEXT_PARAM_ID(position, p_table, current);
3737        SET_FIRST_PARAM_ID(p_table, position);
3738    } else {
3739        /* Search for the position in the linked list where we need to slot in*/
3740        while (position > GET_NEXT_PARAM_ID(current, p_table))
3741            current = GET_NEXT_PARAM_ID(current, p_table);
3742
3743        /*If node already exists no need to alter linking*/
3744        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
3745            next = GET_NEXT_PARAM_ID(current, p_table);
3746            SET_NEXT_PARAM_ID(current, p_table, position);
3747            SET_NEXT_PARAM_ID(position, p_table, next);
3748        }
3749    }
3750
3751    /*************************************************************************
3752    *                   Copy contents into entry                             *
3753    *************************************************************************/
3754
3755    if (paramLength > sizeof(parm_type_t)) {
3756        ALOGE("%s:Size of input larger than max entry size",__func__);
3757        return BAD_VALUE;
3758    }
3759    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
3760    SET_PARM_VALID_BIT(paramType,p_table,1);
3761    return NO_ERROR;
3762}
3763
3764/*===========================================================================
3765 * FUNCTION   : lookupFwkName
3766 *
3767 * DESCRIPTION: In case the enum is not same in fwk and backend
3768 *              make sure the parameter is correctly propogated
3769 *
3770 * PARAMETERS  :
3771 *   @arr      : map between the two enums
3772 *   @len      : len of the map
3773 *   @hal_name : name of the hal_parm to map
3774 *
3775 * RETURN     : int type of status
3776 *              fwk_name  -- success
3777 *              none-zero failure code
3778 *==========================================================================*/
3779int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
3780                                             int len, int hal_name)
3781{
3782
3783    for (int i = 0; i < len; i++) {
3784        if (arr[i].hal_name == hal_name)
3785            return arr[i].fwk_name;
3786    }
3787
3788    /* Not able to find matching framework type is not necessarily
3789     * an error case. This happens when mm-camera supports more attributes
3790     * than the frameworks do */
3791    ALOGD("%s: Cannot find matching framework type", __func__);
3792    return NAME_NOT_FOUND;
3793}
3794
3795/*===========================================================================
3796 * FUNCTION   : lookupHalName
3797 *
3798 * DESCRIPTION: In case the enum is not same in fwk and backend
3799 *              make sure the parameter is correctly propogated
3800 *
3801 * PARAMETERS  :
3802 *   @arr      : map between the two enums
3803 *   @len      : len of the map
3804 *   @fwk_name : name of the hal_parm to map
3805 *
3806 * RETURN     : int32_t type of status
3807 *              hal_name  -- success
3808 *              none-zero failure code
3809 *==========================================================================*/
3810int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
3811                                             int len, unsigned int fwk_name)
3812{
3813    for (int i = 0; i < len; i++) {
3814       if (arr[i].fwk_name == fwk_name)
3815           return arr[i].hal_name;
3816    }
3817    ALOGE("%s: Cannot find matching hal type", __func__);
3818    return NAME_NOT_FOUND;
3819}
3820
3821/*===========================================================================
3822 * FUNCTION   : getCapabilities
3823 *
3824 * DESCRIPTION: query camera capabilities
3825 *
3826 * PARAMETERS :
3827 *   @cameraId  : camera Id
3828 *   @info      : camera info struct to be filled in with camera capabilities
3829 *
3830 * RETURN     : int32_t type of status
3831 *              NO_ERROR  -- success
3832 *              none-zero failure code
3833 *==========================================================================*/
3834int QCamera3HardwareInterface::getCamInfo(int cameraId,
3835                                    struct camera_info *info)
3836{
3837    int rc = 0;
3838
3839    if (NULL == gCamCapability[cameraId]) {
3840        rc = initCapabilities(cameraId);
3841        if (rc < 0) {
3842            //pthread_mutex_unlock(&g_camlock);
3843            return rc;
3844        }
3845    }
3846
3847    if (NULL == gStaticMetadata[cameraId]) {
3848        rc = initStaticMetadata(cameraId);
3849        if (rc < 0) {
3850            return rc;
3851        }
3852    }
3853
3854    switch(gCamCapability[cameraId]->position) {
3855    case CAM_POSITION_BACK:
3856        info->facing = CAMERA_FACING_BACK;
3857        break;
3858
3859    case CAM_POSITION_FRONT:
3860        info->facing = CAMERA_FACING_FRONT;
3861        break;
3862
3863    default:
3864        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
3865        rc = -1;
3866        break;
3867    }
3868
3869
3870    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
3871    info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
3872    info->static_camera_characteristics = gStaticMetadata[cameraId];
3873
3874    return rc;
3875}
3876
3877/*===========================================================================
3878 * FUNCTION   : translateCapabilityToMetadata
3879 *
3880 * DESCRIPTION: translate the capability into camera_metadata_t
3881 *
3882 * PARAMETERS : type of the request
3883 *
3884 *
3885 * RETURN     : success: camera_metadata_t*
3886 *              failure: NULL
3887 *
3888 *==========================================================================*/
3889camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
3890{
3891    pthread_mutex_lock(&mMutex);
3892
3893    if (mDefaultMetadata[type] != NULL) {
3894        pthread_mutex_unlock(&mMutex);
3895        return mDefaultMetadata[type];
3896    }
3897    //first time we are handling this request
3898    //fill up the metadata structure using the wrapper class
3899    CameraMetadata settings;
3900    //translate from cam_capability_t to camera_metadata_tag_t
3901    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
3902    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
3903    int32_t defaultRequestID = 0;
3904    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
3905
3906    uint8_t controlIntent = 0;
3907    uint8_t focusMode;
3908    switch (type) {
3909      case CAMERA3_TEMPLATE_PREVIEW:
3910        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
3911        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3912        break;
3913      case CAMERA3_TEMPLATE_STILL_CAPTURE:
3914        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
3915        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3916        break;
3917      case CAMERA3_TEMPLATE_VIDEO_RECORD:
3918        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
3919        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
3920        break;
3921      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
3922        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
3923        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
3924        break;
3925      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
3926        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
3927        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3928        break;
3929      case CAMERA3_TEMPLATE_MANUAL:
3930        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
3931        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3932        break;
3933      default:
3934        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
3935        break;
3936    }
3937    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
3938
3939    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
3940        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
3941    }
3942    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
3943
3944    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3945            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
3946
3947    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
3948    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3949
3950    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
3951    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
3952
3953    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
3954    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
3955
3956    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
3957    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
3958
3959    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
3960    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
3961
3962    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
3963    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
3964
3965    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
3966    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
3967
3968    /*flash*/
3969    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
3970    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
3971
3972    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
3973    settings.update(ANDROID_FLASH_FIRING_POWER,
3974            &flashFiringLevel, 1);
3975
3976    /* lens */
3977    float default_aperture = gCamCapability[mCameraId]->apertures[0];
3978    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
3979
3980    if (gCamCapability[mCameraId]->filter_densities_count) {
3981        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
3982        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
3983                        gCamCapability[mCameraId]->filter_densities_count);
3984    }
3985
3986    float default_focal_length = gCamCapability[mCameraId]->focal_length;
3987    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
3988
3989    float default_focus_distance = 0;
3990    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
3991
3992    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
3993    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
3994
3995    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
3996    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
3997
3998    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
3999    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
4000
4001    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
4002    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
4003
4004    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
4005    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
4006
4007    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
4008    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
4009
4010    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4011    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4012
4013    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
4014    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
4015
4016    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4017    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
4018
4019    /* Exposure time(Update the Min Exposure Time)*/
4020    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
4021    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
4022
4023    /* frame duration */
4024    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
4025    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
4026
4027    /* sensitivity */
4028    static const int32_t default_sensitivity = 100;
4029    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
4030
4031    /*edge mode*/
4032    static const uint8_t edge_mode = ANDROID_EDGE_MODE_FAST;
4033    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
4034
4035    /*noise reduction mode*/
4036    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
4037    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
4038
4039    /*color correction mode*/
4040    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
4041    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
4042
4043    /*transform matrix mode*/
4044    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
4045    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
4046
4047    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
4048    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
4049
4050    int32_t scaler_crop_region[4];
4051    scaler_crop_region[0] = 0;
4052    scaler_crop_region[1] = 0;
4053    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
4054    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
4055    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
4056
4057    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
4058    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
4059
4060    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4061    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
4062
4063    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
4064                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
4065                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
4066    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
4067
4068    /*focus distance*/
4069    float focus_distance = 0.0;
4070    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
4071
4072    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
4073    float max_range = 0.0;
4074    float max_fixed_fps = 0.0;
4075    int32_t fps_range[2] = {0, 0};
4076    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
4077            i++) {
4078        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
4079            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4080        if (type == CAMERA3_TEMPLATE_PREVIEW ||
4081                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
4082                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
4083            if (range > max_range) {
4084                fps_range[0] =
4085                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4086                fps_range[1] =
4087                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4088                max_range = range;
4089            }
4090        } else {
4091            if (range < 0.01 && max_fixed_fps <
4092                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
4093                fps_range[0] =
4094                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4095                fps_range[1] =
4096                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4097                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4098            }
4099        }
4100    }
4101    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
4102
4103    /*precapture trigger*/
4104    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
4105    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
4106
4107    /*af trigger*/
4108    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
4109    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
4110
4111    /* ae & af regions */
4112    int32_t active_region[] = {
4113            gCamCapability[mCameraId]->active_array_size.left,
4114            gCamCapability[mCameraId]->active_array_size.top,
4115            gCamCapability[mCameraId]->active_array_size.left +
4116                    gCamCapability[mCameraId]->active_array_size.width,
4117            gCamCapability[mCameraId]->active_array_size.top +
4118                    gCamCapability[mCameraId]->active_array_size.height,
4119            1};
4120    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
4121    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
4122
4123    /* black level lock */
4124    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4125    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
4126
4127    /* face detect mode */
4128    uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
4129    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
4130
4131    /* lens shading map mode */
4132    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
4133    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
4134
4135    //special defaults for manual template
4136    if (type == CAMERA3_TEMPLATE_MANUAL) {
4137        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
4138        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
4139
4140        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
4141        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
4142
4143        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
4144        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
4145
4146        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
4147        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
4148
4149        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
4150        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
4151
4152        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
4153        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
4154    }
4155    mDefaultMetadata[type] = settings.release();
4156
4157    pthread_mutex_unlock(&mMutex);
4158    return mDefaultMetadata[type];
4159}
4160
4161/*===========================================================================
4162 * FUNCTION   : setFrameParameters
4163 *
4164 * DESCRIPTION: set parameters per frame as requested in the metadata from
4165 *              framework
4166 *
4167 * PARAMETERS :
4168 *   @request   : request that needs to be serviced
4169 *   @streamID : Stream ID of all the requested streams
4170 *
4171 * RETURN     : success: NO_ERROR
4172 *              failure:
4173 *==========================================================================*/
4174int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
4175                    cam_stream_ID_t streamID)
4176{
4177    /*translate from camera_metadata_t type to parm_type_t*/
4178    int rc = 0;
4179    int32_t hal_version = CAM_HAL_V3;
4180
4181    memset(mParameters, 0, sizeof(metadata_buffer_t));
4182    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
4183    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
4184                sizeof(hal_version), &hal_version);
4185    if (rc < 0) {
4186        ALOGE("%s: Failed to set hal version in the parameters", __func__);
4187        return BAD_VALUE;
4188    }
4189
4190    /*we need to update the frame number in the parameters*/
4191    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
4192                                sizeof(request->frame_number), &(request->frame_number));
4193    if (rc < 0) {
4194        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4195        return BAD_VALUE;
4196    }
4197
4198    /* Update stream id of all the requested buffers */
4199    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
4200                                sizeof(cam_stream_ID_t), &streamID);
4201
4202    if (rc < 0) {
4203        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
4204        return BAD_VALUE;
4205    }
4206
4207    if(request->settings != NULL){
4208        rc = translateToHalMetadata(request, mParameters);
4209    }
4210
4211    /*set the parameters to backend*/
4212    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
4213    return rc;
4214}
4215
4216/*===========================================================================
4217 * FUNCTION   : setReprocParameters
4218 *
4219 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
4220 *              queue it to picture channel for reprocessing.
4221 *
4222 * PARAMETERS :
4223 *   @request   : request that needs to be serviced
4224 *
4225 * RETURN     : success: NO_ERROR
4226 *              failure: non zero failure code
4227 *==========================================================================*/
4228int QCamera3HardwareInterface::setReprocParameters(camera3_capture_request_t *request)
4229{
4230    /*translate from camera_metadata_t type to parm_type_t*/
4231    int rc = 0;
4232    metadata_buffer_t *reprocParam = NULL;
4233
4234    if(request->settings != NULL){
4235        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
4236        return BAD_VALUE;
4237    }
4238    reprocParam = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
4239    if (!reprocParam) {
4240        ALOGE("%s: Failed to allocate reprocessing metadata buffer", __func__);
4241        return NO_MEMORY;
4242    }
4243    memset(reprocParam, 0, sizeof(metadata_buffer_t));
4244    reprocParam->first_flagged_entry = CAM_INTF_PARM_MAX;
4245
4246    /*we need to update the frame number in the parameters*/
4247    rc = AddSetMetaEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
4248                                sizeof(request->frame_number), &(request->frame_number));
4249    if (rc < 0) {
4250        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4251        return BAD_VALUE;
4252    }
4253
4254
4255    rc = translateToHalMetadata(request, reprocParam);
4256    if (rc < 0) {
4257        ALOGE("%s: Failed to translate reproc request", __func__);
4258        delete reprocParam;
4259        return rc;
4260    }
4261    /*queue metadata for reprocessing*/
4262    rc = mPictureChannel->queueReprocMetadata(reprocParam);
4263    if (rc < 0) {
4264        ALOGE("%s: Failed to queue reprocessing metadata", __func__);
4265        delete reprocParam;
4266    }
4267
4268    return rc;
4269}
4270
4271/*===========================================================================
4272 * FUNCTION   : translateToHalMetadata
4273 *
4274 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
4275 *
4276 *
4277 * PARAMETERS :
4278 *   @request  : request sent from framework
4279 *
4280 *
4281 * RETURN     : success: NO_ERROR
4282 *              failure:
4283 *==========================================================================*/
4284int QCamera3HardwareInterface::translateToHalMetadata
4285                                  (const camera3_capture_request_t *request,
4286                                   metadata_buffer_t *hal_metadata)
4287{
4288    int rc = 0;
4289    CameraMetadata frame_settings;
4290    frame_settings = request->settings;
4291
4292    /* Do not change the order of the following list unless you know what you are
4293     * doing.
4294     * The order is laid out in such a way that parameters in the front of the table
4295     * may be used to override the parameters later in the table. Examples are:
4296     * 1. META_MODE should precede AEC/AWB/AF MODE
4297     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
4298     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
4299     * 4. Any mode should precede it's corresponding settings
4300     */
4301    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4302        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4303        rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_MODE,
4304                sizeof(metaMode), &metaMode);
4305        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4306           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4307           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
4308                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
4309                                             fwk_sceneMode);
4310           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4311                sizeof(sceneMode), &sceneMode);
4312        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
4313           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4314           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4315                sizeof(sceneMode), &sceneMode);
4316        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
4317           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4318           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4319                sizeof(sceneMode), &sceneMode);
4320        }
4321    }
4322
4323    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4324        uint8_t fwk_aeMode =
4325            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4326        uint8_t aeMode;
4327        int32_t redeye;
4328
4329        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
4330            aeMode = CAM_AE_MODE_OFF;
4331        } else {
4332            aeMode = CAM_AE_MODE_ON;
4333        }
4334        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
4335            redeye = 1;
4336        } else {
4337            redeye = 0;
4338        }
4339
4340        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
4341                                          sizeof(AE_FLASH_MODE_MAP),
4342                                          fwk_aeMode);
4343        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
4344                sizeof(aeMode), &aeMode);
4345        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
4346                sizeof(flashMode), &flashMode);
4347        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
4348                sizeof(redeye), &redeye);
4349    }
4350
4351    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
4352        uint8_t fwk_whiteLevel =
4353            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
4354        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
4355                sizeof(WHITE_BALANCE_MODES_MAP),
4356                fwk_whiteLevel);
4357        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
4358                sizeof(whiteLevel), &whiteLevel);
4359    }
4360
4361    float focalDistance = -1.0;
4362    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
4363        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
4364        rc = AddSetMetaEntryToBatch(hal_metadata,
4365                CAM_INTF_META_LENS_FOCUS_DISTANCE,
4366                sizeof(focalDistance), &focalDistance);
4367    }
4368
4369    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
4370        uint8_t fwk_focusMode =
4371            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
4372        uint8_t focusMode;
4373        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
4374            focusMode = CAM_FOCUS_MODE_INFINITY;
4375        } else{
4376         focusMode = lookupHalName(FOCUS_MODES_MAP,
4377                                   sizeof(FOCUS_MODES_MAP),
4378                                   fwk_focusMode);
4379        }
4380        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
4381                sizeof(focusMode), &focusMode);
4382    }
4383
4384    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
4385        int32_t antibandingMode =
4386            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
4387        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
4388                sizeof(antibandingMode), &antibandingMode);
4389    }
4390
4391    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4392        int32_t expCompensation = frame_settings.find(
4393            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4394        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4395            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4396        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4397            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4398        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
4399          sizeof(expCompensation), &expCompensation);
4400    }
4401
4402    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4403        int32_t expCompensation = frame_settings.find(
4404            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4405        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4406            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4407        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4408            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4409        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
4410          sizeof(expCompensation), &expCompensation);
4411    }
4412
4413    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
4414        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
4415        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
4416                sizeof(aeLock), &aeLock);
4417    }
4418    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4419        cam_fps_range_t fps_range;
4420        fps_range.min_fps =
4421            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
4422        fps_range.max_fps =
4423            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
4424        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
4425                sizeof(fps_range), &fps_range);
4426    }
4427
4428    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
4429        uint8_t awbLock =
4430            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
4431        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
4432                sizeof(awbLock), &awbLock);
4433    }
4434
4435    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
4436        uint8_t fwk_effectMode =
4437            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
4438        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
4439                sizeof(EFFECT_MODES_MAP),
4440                fwk_effectMode);
4441        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
4442                sizeof(effectMode), &effectMode);
4443    }
4444
4445    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
4446        uint8_t colorCorrectMode =
4447            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
4448        rc =
4449            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
4450                    sizeof(colorCorrectMode), &colorCorrectMode);
4451    }
4452
4453    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
4454        cam_color_correct_gains_t colorCorrectGains;
4455        for (int i = 0; i < 4; i++) {
4456            colorCorrectGains.gains[i] =
4457                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
4458        }
4459        rc =
4460            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
4461                    sizeof(colorCorrectGains), &colorCorrectGains);
4462    }
4463
4464    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
4465        cam_color_correct_matrix_t colorCorrectTransform;
4466        cam_rational_type_t transform_elem;
4467        int num = 0;
4468        for (int i = 0; i < 3; i++) {
4469           for (int j = 0; j < 3; j++) {
4470              transform_elem.numerator =
4471                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
4472              transform_elem.denominator =
4473                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
4474              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
4475              num++;
4476           }
4477        }
4478        rc =
4479            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
4480                    sizeof(colorCorrectTransform), &colorCorrectTransform);
4481    }
4482
4483    cam_trigger_t aecTrigger;
4484    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
4485    aecTrigger.trigger_id = -1;
4486    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
4487        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
4488        aecTrigger.trigger =
4489            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
4490        aecTrigger.trigger_id =
4491            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
4492        rc = AddSetMetaEntryToBatch(hal_metadata,
4493                CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
4494                sizeof(aecTrigger), &aecTrigger);
4495    }
4496    /*af_trigger must come with a trigger id*/
4497    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
4498        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
4499        cam_trigger_t af_trigger;
4500        af_trigger.trigger =
4501            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
4502        af_trigger.trigger_id =
4503            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
4504        rc = AddSetMetaEntryToBatch(hal_metadata,
4505                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
4506    }
4507
4508    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
4509        int32_t demosaic =
4510            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
4511        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
4512                sizeof(demosaic), &demosaic);
4513    }
4514
4515    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
4516        cam_edge_application_t edge_application;
4517        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
4518        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
4519            edge_application.sharpness = 0;
4520        } else {
4521            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
4522                uint8_t edgeStrength =
4523                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
4524                edge_application.sharpness = (int32_t)edgeStrength;
4525            } else {
4526                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
4527            }
4528        }
4529        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
4530                sizeof(edge_application), &edge_application);
4531    }
4532
4533    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
4534        int32_t respectFlashMode = 1;
4535        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4536            uint8_t fwk_aeMode =
4537                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4538            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
4539                respectFlashMode = 0;
4540                ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
4541                    __func__);
4542            }
4543        }
4544        if (respectFlashMode) {
4545            uint8_t flashMode =
4546                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
4547            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
4548                                          sizeof(FLASH_MODES_MAP),
4549                                          flashMode);
4550            ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
4551            // To check: CAM_INTF_META_FLASH_MODE usage
4552            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
4553                          sizeof(flashMode), &flashMode);
4554        }
4555    }
4556
4557    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
4558        uint8_t flashPower =
4559            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
4560        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
4561                sizeof(flashPower), &flashPower);
4562    }
4563
4564    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
4565        int64_t flashFiringTime =
4566            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
4567        rc = AddSetMetaEntryToBatch(hal_metadata,
4568                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
4569    }
4570
4571    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
4572        uint8_t hotPixelMode =
4573            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
4574        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
4575                sizeof(hotPixelMode), &hotPixelMode);
4576    }
4577
4578    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
4579        float lensAperture =
4580            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
4581        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
4582                sizeof(lensAperture), &lensAperture);
4583    }
4584
4585    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
4586        float filterDensity =
4587            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
4588        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
4589                sizeof(filterDensity), &filterDensity);
4590    }
4591
4592    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
4593        float focalLength =
4594            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
4595        rc = AddSetMetaEntryToBatch(hal_metadata,
4596                CAM_INTF_META_LENS_FOCAL_LENGTH,
4597                sizeof(focalLength), &focalLength);
4598    }
4599
4600    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
4601        uint8_t optStabMode =
4602            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
4603        rc = AddSetMetaEntryToBatch(hal_metadata,
4604                CAM_INTF_META_LENS_OPT_STAB_MODE,
4605                sizeof(optStabMode), &optStabMode);
4606    }
4607
4608    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4609        uint8_t noiseRedMode =
4610            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4611        rc = AddSetMetaEntryToBatch(hal_metadata,
4612                CAM_INTF_META_NOISE_REDUCTION_MODE,
4613                sizeof(noiseRedMode), &noiseRedMode);
4614    }
4615
4616    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
4617        uint8_t noiseRedStrength =
4618            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
4619        rc = AddSetMetaEntryToBatch(hal_metadata,
4620                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
4621                sizeof(noiseRedStrength), &noiseRedStrength);
4622    }
4623
4624    cam_crop_region_t scalerCropRegion;
4625    bool scalerCropSet = false;
4626    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
4627        scalerCropRegion.left =
4628            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
4629        scalerCropRegion.top =
4630            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
4631        scalerCropRegion.width =
4632            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
4633        scalerCropRegion.height =
4634            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
4635        rc = AddSetMetaEntryToBatch(hal_metadata,
4636                CAM_INTF_META_SCALER_CROP_REGION,
4637                sizeof(scalerCropRegion), &scalerCropRegion);
4638        scalerCropSet = true;
4639    }
4640
4641    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
4642        int64_t sensorExpTime =
4643            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
4644        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
4645        rc = AddSetMetaEntryToBatch(hal_metadata,
4646                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
4647                sizeof(sensorExpTime), &sensorExpTime);
4648    }
4649
4650    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
4651        int64_t sensorFrameDuration =
4652            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
4653        int64_t minFrameDuration = getMinFrameDuration(request);
4654        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
4655        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
4656            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
4657        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
4658        rc = AddSetMetaEntryToBatch(hal_metadata,
4659                CAM_INTF_META_SENSOR_FRAME_DURATION,
4660                sizeof(sensorFrameDuration), &sensorFrameDuration);
4661    }
4662
4663    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
4664        int32_t sensorSensitivity =
4665            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
4666        if (sensorSensitivity <
4667                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
4668            sensorSensitivity =
4669                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
4670        if (sensorSensitivity >
4671                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
4672            sensorSensitivity =
4673                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
4674        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
4675        rc = AddSetMetaEntryToBatch(hal_metadata,
4676                CAM_INTF_META_SENSOR_SENSITIVITY,
4677                sizeof(sensorSensitivity), &sensorSensitivity);
4678    }
4679
4680    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
4681        int32_t shadingMode =
4682            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
4683        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
4684                sizeof(shadingMode), &shadingMode);
4685    }
4686
4687    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
4688        uint8_t shadingStrength =
4689            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
4690        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
4691                sizeof(shadingStrength), &shadingStrength);
4692    }
4693
4694    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
4695        uint8_t fwk_facedetectMode =
4696            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
4697        uint8_t facedetectMode =
4698            lookupHalName(FACEDETECT_MODES_MAP,
4699                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
4700        rc = AddSetMetaEntryToBatch(hal_metadata,
4701                CAM_INTF_META_STATS_FACEDETECT_MODE,
4702                sizeof(facedetectMode), &facedetectMode);
4703    }
4704
4705    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
4706        uint8_t histogramMode =
4707            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
4708        rc = AddSetMetaEntryToBatch(hal_metadata,
4709                CAM_INTF_META_STATS_HISTOGRAM_MODE,
4710                sizeof(histogramMode), &histogramMode);
4711    }
4712
4713    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
4714        uint8_t sharpnessMapMode =
4715            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
4716        rc = AddSetMetaEntryToBatch(hal_metadata,
4717                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
4718                sizeof(sharpnessMapMode), &sharpnessMapMode);
4719    }
4720
4721    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
4722        uint8_t tonemapMode =
4723            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
4724        rc = AddSetMetaEntryToBatch(hal_metadata,
4725                CAM_INTF_META_TONEMAP_MODE,
4726                sizeof(tonemapMode), &tonemapMode);
4727    }
4728    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
4729    /*All tonemap channels will have the same number of points*/
4730    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
4731        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
4732        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
4733        cam_rgb_tonemap_curves tonemapCurves;
4734        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
4735
4736        /* ch0 = G*/
4737        int point = 0;
4738        cam_tonemap_curve_t tonemapCurveGreen;
4739        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
4740            for (int j = 0; j < 2; j++) {
4741               tonemapCurveGreen.tonemap_points[i][j] =
4742                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
4743               point++;
4744            }
4745        }
4746        tonemapCurves.curves[0] = tonemapCurveGreen;
4747
4748        /* ch 1 = B */
4749        point = 0;
4750        cam_tonemap_curve_t tonemapCurveBlue;
4751        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
4752            for (int j = 0; j < 2; j++) {
4753               tonemapCurveBlue.tonemap_points[i][j] =
4754                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
4755               point++;
4756            }
4757        }
4758        tonemapCurves.curves[1] = tonemapCurveBlue;
4759
4760        /* ch 2 = R */
4761        point = 0;
4762        cam_tonemap_curve_t tonemapCurveRed;
4763        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
4764            for (int j = 0; j < 2; j++) {
4765               tonemapCurveRed.tonemap_points[i][j] =
4766                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
4767               point++;
4768            }
4769        }
4770        tonemapCurves.curves[2] = tonemapCurveRed;
4771
4772        rc = AddSetMetaEntryToBatch(hal_metadata,
4773                CAM_INTF_META_TONEMAP_CURVES,
4774                sizeof(tonemapCurves), &tonemapCurves);
4775    }
4776
4777    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4778        uint8_t captureIntent =
4779            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4780        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
4781                sizeof(captureIntent), &captureIntent);
4782    }
4783
4784    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
4785        uint8_t blackLevelLock =
4786            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
4787        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
4788                sizeof(blackLevelLock), &blackLevelLock);
4789    }
4790
4791    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
4792        uint8_t lensShadingMapMode =
4793            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
4794        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
4795                sizeof(lensShadingMapMode), &lensShadingMapMode);
4796    }
4797
4798    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
4799        cam_area_t roi;
4800        bool reset = true;
4801        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
4802        if (scalerCropSet) {
4803            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4804        }
4805        if (reset) {
4806            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
4807                    sizeof(roi), &roi);
4808        }
4809    }
4810
4811    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
4812        cam_area_t roi;
4813        bool reset = true;
4814        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
4815        if (scalerCropSet) {
4816            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4817        }
4818        if (reset) {
4819            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
4820                    sizeof(roi), &roi);
4821        }
4822    }
4823
4824    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
4825        cam_area_t roi;
4826        bool reset = true;
4827        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
4828        if (scalerCropSet) {
4829            reset = resetIfNeededROI(&roi, &scalerCropRegion);
4830        }
4831        if (reset) {
4832            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AWB_REGIONS,
4833                    sizeof(roi), &roi);
4834        }
4835    }
4836
4837    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
4838        cam_test_pattern_data_t testPatternData;
4839        uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
4840        uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
4841               sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
4842
4843        memset(&testPatternData, 0, sizeof(testPatternData));
4844        testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
4845        if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
4846                frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
4847            int32_t* fwk_testPatternData = frame_settings.find(
4848                    ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
4849            testPatternData.r = fwk_testPatternData[0];
4850            testPatternData.b = fwk_testPatternData[3];
4851            switch (gCamCapability[mCameraId]->color_arrangement) {
4852            case CAM_FILTER_ARRANGEMENT_RGGB:
4853            case CAM_FILTER_ARRANGEMENT_GRBG:
4854                testPatternData.gr = fwk_testPatternData[1];
4855                testPatternData.gb = fwk_testPatternData[2];
4856                break;
4857            case CAM_FILTER_ARRANGEMENT_GBRG:
4858            case CAM_FILTER_ARRANGEMENT_BGGR:
4859                testPatternData.gr = fwk_testPatternData[2];
4860                testPatternData.gb = fwk_testPatternData[1];
4861                break;
4862            default:
4863                ALOGE("%s: color arrangement %d is not supported", __func__,
4864                    gCamCapability[mCameraId]->color_arrangement);
4865                break;
4866            }
4867        }
4868        rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_TEST_PATTERN_DATA,
4869            sizeof(testPatternData), &testPatternData);
4870    }
4871
4872    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
4873        double *gps_coords =
4874            frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
4875        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
4876    }
4877
4878    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
4879        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
4880        const char *gps_methods_src = (const char *)
4881                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
4882        uint32_t count = frame_settings.find(
4883                ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
4884        memset(gps_methods, 0, sizeof(gps_methods));
4885        strncpy(gps_methods, gps_methods_src, sizeof(gps_methods));
4886        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
4887    }
4888
4889    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
4890        int64_t gps_timestamp =
4891            frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
4892        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
4893    }
4894
4895    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
4896        int32_t orientation =
4897            frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
4898        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
4899    }
4900
4901    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
4902        int8_t quality =
4903            frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
4904        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
4905    }
4906
4907    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
4908        int8_t thumb_quality =
4909            frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
4910        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
4911    }
4912
4913    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
4914        cam_dimension_t dim;
4915        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
4916        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
4917        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
4918    }
4919
4920    // Internal metadata
4921    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
4922        uint8_t* privatedata =
4923            frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.u8;
4924        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
4925            sizeof(uint8_t) * MAX_METADATA_PAYLOAD_SIZE, privatedata);
4926    }
4927
4928    // EV step
4929    rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
4930            sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
4931
4932    return rc;
4933}
4934
4935/*===========================================================================
4936 * FUNCTION   : captureResultCb
4937 *
4938 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
4939 *
4940 * PARAMETERS :
4941 *   @frame  : frame information from mm-camera-interface
4942 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
4943 *   @userdata: userdata
4944 *
4945 * RETURN     : NONE
4946 *==========================================================================*/
4947void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
4948                camera3_stream_buffer_t *buffer,
4949                uint32_t frame_number, void *userdata)
4950{
4951    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
4952    if (hw == NULL) {
4953        ALOGE("%s: Invalid hw %p", __func__, hw);
4954        return;
4955    }
4956
4957    hw->captureResultCb(metadata, buffer, frame_number);
4958    return;
4959}
4960
4961
4962/*===========================================================================
4963 * FUNCTION   : initialize
4964 *
4965 * DESCRIPTION: Pass framework callback pointers to HAL
4966 *
4967 * PARAMETERS :
4968 *
4969 *
4970 * RETURN     : Success : 0
4971 *              Failure: -ENODEV
4972 *==========================================================================*/
4973
4974int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
4975                                  const camera3_callback_ops_t *callback_ops)
4976{
4977    ALOGV("%s: E", __func__);
4978    QCamera3HardwareInterface *hw =
4979        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
4980    if (!hw) {
4981        ALOGE("%s: NULL camera device", __func__);
4982        return -ENODEV;
4983    }
4984
4985    int rc = hw->initialize(callback_ops);
4986    ALOGV("%s: X", __func__);
4987    return rc;
4988}
4989
4990/*===========================================================================
4991 * FUNCTION   : configure_streams
4992 *
4993 * DESCRIPTION:
4994 *
4995 * PARAMETERS :
4996 *
4997 *
4998 * RETURN     : Success: 0
4999 *              Failure: -EINVAL (if stream configuration is invalid)
5000 *                       -ENODEV (fatal error)
5001 *==========================================================================*/
5002
5003int QCamera3HardwareInterface::configure_streams(
5004        const struct camera3_device *device,
5005        camera3_stream_configuration_t *stream_list)
5006{
5007    ALOGV("%s: E", __func__);
5008    QCamera3HardwareInterface *hw =
5009        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5010    if (!hw) {
5011        ALOGE("%s: NULL camera device", __func__);
5012        return -ENODEV;
5013    }
5014    int rc = hw->configureStreams(stream_list);
5015    ALOGV("%s: X", __func__);
5016    return rc;
5017}
5018
5019/*===========================================================================
5020 * FUNCTION   : register_stream_buffers
5021 *
5022 * DESCRIPTION: Register stream buffers with the device
5023 *
5024 * PARAMETERS :
5025 *
5026 * RETURN     :
5027 *==========================================================================*/
5028int QCamera3HardwareInterface::register_stream_buffers(
5029        const struct camera3_device *device,
5030        const camera3_stream_buffer_set_t *buffer_set)
5031{
5032    ALOGV("%s: E", __func__);
5033    QCamera3HardwareInterface *hw =
5034        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5035    if (!hw) {
5036        ALOGE("%s: NULL camera device", __func__);
5037        return -ENODEV;
5038    }
5039    int rc = hw->registerStreamBuffers(buffer_set);
5040    ALOGV("%s: X", __func__);
5041    return rc;
5042}
5043
5044/*===========================================================================
5045 * FUNCTION   : construct_default_request_settings
5046 *
5047 * DESCRIPTION: Configure a settings buffer to meet the required use case
5048 *
5049 * PARAMETERS :
5050 *
5051 *
5052 * RETURN     : Success: Return valid metadata
5053 *              Failure: Return NULL
5054 *==========================================================================*/
5055const camera_metadata_t* QCamera3HardwareInterface::
5056    construct_default_request_settings(const struct camera3_device *device,
5057                                        int type)
5058{
5059
5060    ALOGV("%s: E", __func__);
5061    camera_metadata_t* fwk_metadata = NULL;
5062    QCamera3HardwareInterface *hw =
5063        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5064    if (!hw) {
5065        ALOGE("%s: NULL camera device", __func__);
5066        return NULL;
5067    }
5068
5069    fwk_metadata = hw->translateCapabilityToMetadata(type);
5070
5071    ALOGV("%s: X", __func__);
5072    return fwk_metadata;
5073}
5074
5075/*===========================================================================
5076 * FUNCTION   : process_capture_request
5077 *
5078 * DESCRIPTION:
5079 *
5080 * PARAMETERS :
5081 *
5082 *
5083 * RETURN     :
5084 *==========================================================================*/
5085int QCamera3HardwareInterface::process_capture_request(
5086                    const struct camera3_device *device,
5087                    camera3_capture_request_t *request)
5088{
5089    ALOGV("%s: E", __func__);
5090    QCamera3HardwareInterface *hw =
5091        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5092    if (!hw) {
5093        ALOGE("%s: NULL camera device", __func__);
5094        return -EINVAL;
5095    }
5096
5097    int rc = hw->processCaptureRequest(request);
5098    ALOGV("%s: X", __func__);
5099    return rc;
5100}
5101
5102/*===========================================================================
5103 * FUNCTION   : get_metadata_vendor_tag_ops
5104 *
5105 * DESCRIPTION: Get the metadata vendor tag function pointers
5106 *
5107 * PARAMETERS :
5108 *    @ops   : function pointer table to be filled by HAL
5109 *
5110 *
5111 * RETURN     : NONE
5112 *==========================================================================*/
5113void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
5114                const struct camera3_device * /*device*/,
5115                vendor_tag_query_ops_t* ops)
5116{
5117    ALOGV("%s: E", __func__);
5118    ops->get_camera_vendor_section_name = get_camera_vendor_section_name;
5119    ops->get_camera_vendor_tag_name = get_camera_vendor_tag_name;
5120    ops->get_camera_vendor_tag_type = get_camera_vendor_tag_type;
5121    ALOGV("%s: X", __func__);
5122    return;
5123}
5124
5125/*===========================================================================
5126 * FUNCTION   : get_camera_vendor_section_name
5127 *
5128 * DESCRIPTION: Get section name for vendor tag
5129 *
5130 * PARAMETERS :
5131 *    @tag   :  Vendor specific tag
5132 *
5133 *
5134 * RETURN     : Success: the section name of the specific tag
5135 *              Failure: NULL
5136 *==========================================================================*/
5137
5138const char* QCamera3HardwareInterface::get_camera_vendor_section_name(
5139                const vendor_tag_query_ops_t * /*ops*/,
5140                uint32_t tag)
5141{
5142    ALOGV("%s: E", __func__);
5143    const char *ret;
5144    uint32_t section = tag >> 16;
5145
5146    if (section < VENDOR_SECTION || section > QCAMERA3_SECTIONS_END)
5147        ret = NULL;
5148    else
5149        ret = qcamera3_ext_section_names[section - VENDOR_SECTION];
5150
5151    ALOGV("%s: X", __func__);
5152    return ret;
5153}
5154
5155/*===========================================================================
5156 * FUNCTION   : get_camera_vendor_tag_name
5157 *
5158 * DESCRIPTION: Get name of a vendor specific tag
5159 *
5160 * PARAMETERS :
5161 *    @tag   :  Vendor specific tag
5162 *
5163 *
5164 * RETURN     : Success: the name of the specific tag
5165 *              Failure: NULL
5166 *==========================================================================*/
5167const char* QCamera3HardwareInterface::get_camera_vendor_tag_name(
5168                const vendor_tag_query_ops_t * /*ops*/,
5169                uint32_t tag)
5170{
5171    ALOGV("%s: E", __func__);
5172    const char *ret;
5173    uint32_t section = tag >> 16;
5174    uint32_t section_index = section - VENDOR_SECTION;
5175    uint32_t tag_index = tag & 0xFFFF;
5176
5177    if (section < VENDOR_SECTION || section > QCAMERA3_SECTIONS_END)
5178        ret = NULL;
5179    else if (tag >= (uint32_t)qcamera3_ext3_section_bounds[section_index])
5180        ret = NULL;
5181    else
5182        ret = qcamera3_tag_info[section_index][tag_index].tag_name;
5183
5184    ALOGV("%s: X", __func__);
5185    return ret;
5186}
5187
5188/*===========================================================================
5189 * FUNCTION   : get_camera_vendor_tag_type
5190 *
5191 * DESCRIPTION: Get type of a vendor specific tag
5192 *
5193 * PARAMETERS :
5194 *    @tag   :  Vendor specific tag
5195 *
5196 *
5197 * RETURN     : Success: the type of the specific tag
5198 *              Failure: -1
5199 *==========================================================================*/
5200int QCamera3HardwareInterface::get_camera_vendor_tag_type(
5201                const vendor_tag_query_ops_t * /*ops*/,
5202                uint32_t tag)
5203{
5204    ALOGV("%s: E", __func__);
5205    int ret;
5206    uint32_t section = tag >> 16;
5207    uint32_t section_index = section - VENDOR_SECTION;
5208    uint32_t tag_index = tag & 0xFFFF;
5209
5210    if (section < VENDOR_SECTION || section > QCAMERA3_SECTIONS_END)
5211        ret = -1;
5212    else if (tag >= (uint32_t )qcamera3_ext3_section_bounds[section_index])
5213        ret = -1;
5214    else
5215        ret = qcamera3_tag_info[section_index][tag_index].tag_type;
5216
5217    ALOGV("%s: X", __func__);
5218    return ret;
5219}
5220
5221/*===========================================================================
5222 * FUNCTION   : dump
5223 *
5224 * DESCRIPTION:
5225 *
5226 * PARAMETERS :
5227 *
5228 *
5229 * RETURN     :
5230 *==========================================================================*/
5231
5232void QCamera3HardwareInterface::dump(
5233                const struct camera3_device *device, int fd)
5234{
5235    ALOGV("%s: E", __func__);
5236    QCamera3HardwareInterface *hw =
5237        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5238    if (!hw) {
5239        ALOGE("%s: NULL camera device", __func__);
5240        return;
5241    }
5242
5243    hw->dump(fd);
5244    ALOGV("%s: X", __func__);
5245    return;
5246}
5247
5248/*===========================================================================
5249 * FUNCTION   : flush
5250 *
5251 * DESCRIPTION:
5252 *
5253 * PARAMETERS :
5254 *
5255 *
5256 * RETURN     :
5257 *==========================================================================*/
5258
5259int QCamera3HardwareInterface::flush(
5260                const struct camera3_device *device)
5261{
5262    int rc;
5263    ALOGV("%s: E", __func__);
5264    QCamera3HardwareInterface *hw =
5265        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5266    if (!hw) {
5267        ALOGE("%s: NULL camera device", __func__);
5268        return -EINVAL;
5269    }
5270
5271    rc = hw->flush();
5272    ALOGV("%s: X", __func__);
5273    return rc;
5274}
5275
5276/*===========================================================================
5277 * FUNCTION   : close_camera_device
5278 *
5279 * DESCRIPTION:
5280 *
5281 * PARAMETERS :
5282 *
5283 *
5284 * RETURN     :
5285 *==========================================================================*/
5286int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
5287{
5288    ALOGV("%s: E", __func__);
5289    int ret = NO_ERROR;
5290    QCamera3HardwareInterface *hw =
5291        reinterpret_cast<QCamera3HardwareInterface *>(
5292            reinterpret_cast<camera3_device_t *>(device)->priv);
5293    if (!hw) {
5294        ALOGE("NULL camera device");
5295        return BAD_VALUE;
5296    }
5297    delete hw;
5298
5299    pthread_mutex_lock(&mCameraSessionLock);
5300    mCameraSessionActive = 0;
5301    pthread_mutex_unlock(&mCameraSessionLock);
5302    ALOGV("%s: X", __func__);
5303    return ret;
5304}
5305
5306/*===========================================================================
5307 * FUNCTION   : getWaveletDenoiseProcessPlate
5308 *
5309 * DESCRIPTION: query wavelet denoise process plate
5310 *
5311 * PARAMETERS : None
5312 *
 * RETURN     : WNR process plate value
5314 *==========================================================================*/
5315cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
5316{
5317    char prop[PROPERTY_VALUE_MAX];
5318    memset(prop, 0, sizeof(prop));
5319    property_get("persist.denoise.process.plates", prop, "0");
5320    int processPlate = atoi(prop);
5321    switch(processPlate) {
5322    case 0:
5323        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
5324    case 1:
5325        return CAM_WAVELET_DENOISE_CBCR_ONLY;
5326    case 2:
5327        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5328    case 3:
5329        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
5330    default:
5331        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5332    }
5333}
5334
5335/*===========================================================================
5336 * FUNCTION   : needRotationReprocess
5337 *
5338 * DESCRIPTION: if rotation needs to be done by reprocess in pp
5339 *
5340 * PARAMETERS : none
5341 *
5342 * RETURN     : true: needed
5343 *              false: no need
5344 *==========================================================================*/
5345bool QCamera3HardwareInterface::needRotationReprocess()
5346{
5347    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
5348        // current rotation is not zero, and pp has the capability to process rotation
5349        ALOGD("%s: need do reprocess for rotation", __func__);
5350        return true;
5351    }
5352
5353    return false;
5354}
5355
5356/*===========================================================================
5357 * FUNCTION   : needReprocess
5358 *
 * DESCRIPTION: if reprocess is needed
5360 *
5361 * PARAMETERS : none
5362 *
5363 * RETURN     : true: needed
5364 *              false: no need
5365 *==========================================================================*/
5366bool QCamera3HardwareInterface::needReprocess()
5367{
5368    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
5369        // TODO: add for ZSL HDR later
5370        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
5371        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
5372        return true;
5373    }
5374    return needRotationReprocess();
5375}
5376
5377/*===========================================================================
5378 * FUNCTION   : addOfflineReprocChannel
5379 *
5380 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
5381 *              coming from input channel
5382 *
5383 * PARAMETERS :
5384 *   @pInputChannel : ptr to input channel whose frames will be post-processed
5385 *
5386 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
5387 *==========================================================================*/
5388QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
5389              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle, metadata_buffer_t *metadata)
5390{
5391    int32_t rc = NO_ERROR;
5392    QCamera3ReprocessChannel *pChannel = NULL;
5393    if (pInputChannel == NULL) {
5394        ALOGE("%s: input channel obj is NULL", __func__);
5395        return NULL;
5396    }
5397
5398    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
5399            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
5400    if (NULL == pChannel) {
5401        ALOGE("%s: no mem for reprocess channel", __func__);
5402        return NULL;
5403    }
5404
5405    rc = pChannel->initialize();
5406    if (rc != NO_ERROR) {
5407        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
5408        delete pChannel;
5409        return NULL;
5410    }
5411
5412    // pp feature config
5413    cam_pp_feature_config_t pp_config;
5414    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
5415
5416    if (IS_PARM_VALID(CAM_INTF_META_EDGE_MODE, metadata)) {
5417        cam_edge_application_t *edge = (cam_edge_application_t *)
5418                POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
5419        if (edge->edge_mode != CAM_EDGE_MODE_OFF) {
5420            pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
5421            pp_config.sharpness = edge->sharpness;
5422        }
5423    }
5424
5425    if (IS_PARM_VALID(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
5426        uint8_t *noise_mode = (uint8_t *)POINTER_OF(
5427                CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
5428        if (*noise_mode != CAM_NOISE_REDUCTION_MODE_OFF) {
5429            pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
5430            pp_config.denoise2d.denoise_enable = 1;
5431            pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
5432        }
5433    }
5434
5435    if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
5436        int32_t *rotation = (int32_t *)POINTER_OF(
5437                CAM_INTF_META_JPEG_ORIENTATION, metadata);
5438
5439        if (needRotationReprocess()) {
5440            pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
5441            if (*rotation == 0) {
5442                pp_config.rotation = ROTATE_0;
5443            } else if (*rotation == 90) {
5444                pp_config.rotation = ROTATE_90;
5445            } else if (*rotation == 180) {
5446                pp_config.rotation = ROTATE_180;
5447            } else if (*rotation == 270) {
5448                pp_config.rotation = ROTATE_270;
5449            }
5450        }
5451    }
5452
5453    rc = pChannel->addReprocStreamsFromSource(pp_config,
5454                                             pInputChannel,
5455                                             mMetadataChannel);
5456
5457    if (rc != NO_ERROR) {
5458        delete pChannel;
5459        return NULL;
5460    }
5461    return pChannel;
5462}
5463
5464}; //end namespace qcamera
5465