QCamera3HWI.cpp revision c232705a3fe88a15722efec36dba335e417b7feb
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand: pointer to the INDEX-th buffer of a QCamera3 memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-camera capability records, indexed by camera id. Fields such as
// version and min_num_pp_bufs are written in the constructor below.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Previously applied parameter buffer -- not referenced in this chunk;
// presumably the last settings pushed to the backend (verify at callers).
parm_buffer_t *prevSettings;
// Cached static metadata per camera -- not referenced in this chunk.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards the one-session-at-a-time policy enforced in openCamera();
// mCameraSessionActive is read/written only with this mutex held.
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
59
// Translation table: Android framework effect-mode enum -> backend enum.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Translation table: framework AWB mode -> backend white-balance mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Translation table: framework scene mode -> backend scene mode.
// Note STEADYPHOTO intentionally maps to the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Translation table: framework AF mode -> backend focus mode.
// AF_MODE_OFF is represented as fixed focus on the backend.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Translation table: framework AE antibanding mode -> backend mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Translation table: framework AE mode -> backend flash mode. Both OFF
// and plain ON map to flash OFF (AE without flash firing).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Translation table: framework flash mode -> backend flash mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Translation table: framework face-detect mode -> backend mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Supported JPEG thumbnail sizes, flattened as (width, height) pairs;
// the trailing 0,0 pair advertises "no thumbnail".
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
139
// camera3_device_ops dispatch table: routes framework entry points to
// the class's static trampoline functions (GCC designated-initializer
// "label:" syntax).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
151
152
153/*===========================================================================
154 * FUNCTION   : QCamera3HardwareInterface
155 *
156 * DESCRIPTION: constructor of QCamera3HardwareInterface
157 *
158 * PARAMETERS :
159 *   @cameraId  : camera ID
160 *
161 * RETURN     : none
162 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL)
{
    // Wire up the camera3_device_t handed to the framework; priv points
    // back to this instance so the static ops can recover it.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // Advertise HAL3 semantics to the backend for this camera.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request bookkeeping: condition/mutex pair used to track in-flight
    // capture requests.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; clear the cache.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL module is optional; absence is logged but not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
206
207/*===========================================================================
208 * FUNCTION   : ~QCamera3HardwareInterface
209 *
210 * DESCRIPTION: destructor of QCamera3HardwareInterface
211 *
212 * PARAMETERS : none
213 *
214 * RETURN     : none
215 *==========================================================================*/
216QCamera3HardwareInterface::~QCamera3HardwareInterface()
217{
218    ALOGV("%s: E", __func__);
219    /* We need to stop all streams before deleting any stream */
220        /*flush the metadata list*/
221    if (!mStoredMetadataList.empty()) {
222        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
223              m != mStoredMetadataList.end(); m++) {
224            mMetadataChannel->bufDone(m->meta_buf);
225            free(m->meta_buf);
226            m = mStoredMetadataList.erase(m);
227        }
228    }
229    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
230        it != mStreamInfo.end(); it++) {
231        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
232        if (channel)
233           channel->stop();
234    }
235    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
236        it != mStreamInfo.end(); it++) {
237        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
238        if (channel)
239            delete channel;
240        free (*it);
241    }
242
243    mPictureChannel = NULL;
244
245    if (mJpegSettings != NULL) {
246        free(mJpegSettings);
247        mJpegSettings = NULL;
248    }
249
250    /* Clean up all channels */
251    if (mCameraInitialized) {
252        mMetadataChannel->stop();
253        delete mMetadataChannel;
254        mMetadataChannel = NULL;
255        deinitParameters();
256    }
257
258    if (mCameraOpened)
259        closeCamera();
260
261    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
262        if (mDefaultMetadata[i])
263            free_camera_metadata(mDefaultMetadata[i]);
264
265    pthread_cond_destroy(&mRequestCond);
266
267    pthread_mutex_destroy(&mMutex);
268    ALOGV("%s: X", __func__);
269}
270
271/*===========================================================================
272 * FUNCTION   : openCamera
273 *
274 * DESCRIPTION: open camera
275 *
276 * PARAMETERS :
277 *   @hw_device  : double ptr for camera device struct
278 *
279 * RETURN     : int32_t type of status
280 *              NO_ERROR  -- success
281 *              none-zero failure code
282 *==========================================================================*/
283int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
284{
285    int rc = 0;
286    pthread_mutex_lock(&mCameraSessionLock);
287    if (mCameraSessionActive) {
288        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
289        pthread_mutex_unlock(&mCameraSessionLock);
290        return INVALID_OPERATION;
291    }
292
293    if (mCameraOpened) {
294        *hw_device = NULL;
295        return PERMISSION_DENIED;
296    }
297
298    rc = openCamera();
299    if (rc == 0) {
300        *hw_device = &mCameraDevice.common;
301        mCameraSessionActive = 1;
302    } else
303        *hw_device = NULL;
304
305#ifdef HAS_MULTIMEDIA_HINTS
306    if (rc == 0) {
307        if (m_pPowerModule) {
308            if (m_pPowerModule->powerHint) {
309                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
310                        (void *)"state=1");
311            }
312        }
313    }
314#endif
315    pthread_mutex_unlock(&mCameraSessionLock);
316    return rc;
317}
318
319/*===========================================================================
320 * FUNCTION   : openCamera
321 *
322 * DESCRIPTION: open camera
323 *
324 * PARAMETERS : none
325 *
326 * RETURN     : int32_t type of status
327 *              NO_ERROR  -- success
328 *              none-zero failure code
329 *==========================================================================*/
330int QCamera3HardwareInterface::openCamera()
331{
332    if (mCameraHandle) {
333        ALOGE("Failure: Camera already opened");
334        return ALREADY_EXISTS;
335    }
336    mCameraHandle = camera_open(mCameraId);
337    if (!mCameraHandle) {
338        ALOGE("camera_open failed.");
339        return UNKNOWN_ERROR;
340    }
341
342    mCameraOpened = true;
343
344    return NO_ERROR;
345}
346
347/*===========================================================================
348 * FUNCTION   : closeCamera
349 *
350 * DESCRIPTION: close camera
351 *
352 * PARAMETERS : none
353 *
354 * RETURN     : int32_t type of status
355 *              NO_ERROR  -- success
356 *              none-zero failure code
357 *==========================================================================*/
358int QCamera3HardwareInterface::closeCamera()
359{
360    int rc = NO_ERROR;
361
362    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
363    mCameraHandle = NULL;
364    mCameraOpened = false;
365
366#ifdef HAS_MULTIMEDIA_HINTS
367    if (rc == NO_ERROR) {
368        if (m_pPowerModule) {
369            if (m_pPowerModule->powerHint) {
370                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
371                        (void *)"state=0");
372            }
373        }
374    }
375#endif
376
377    return rc;
378}
379
380/*===========================================================================
381 * FUNCTION   : initialize
382 *
383 * DESCRIPTION: Initialize frameworks callback functions
384 *
385 * PARAMETERS :
386 *   @callback_ops : callback function to frameworks
387 *
388 * RETURN     :
389 *
390 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    // Set up the parameter buffer shared with the backend.
    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
       goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    // NOTE(review): this NULL check only fires if operator new can return
    // NULL (e.g. a nothrow/-fno-exceptions build) -- confirm which build
    // mode applies; otherwise this branch is dead.
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    return 0;

// Error unwinding: each label undoes the steps completed before the
// failure, in reverse order (channel -> parameters -> lock).
err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
433
434/*===========================================================================
435 * FUNCTION   : configureStreams
436 *
437 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
438 *              and output streams.
439 *
440 * PARAMETERS :
441 *   @stream_list : streams to be configured
442 *
443 * RETURN     :
444 *
445 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    mIsZslMode = false;
    pthread_mutex_lock(&mMutex);
    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    /* First invalidate all the streams tracked in mStreamInfo;
     * if they appear again in streamList, they will be re-validated. */
    // NOTE(review): unlike the destructor loops, channel is not
    // NULL-checked before stop() here -- assumes every tracked stream
    // still has a live channel; confirm for partially-configured cases.
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }

    // Walk the requested streams: mark known ones RECONFIGURE (dropping
    // their stale channel), add unknown ones as VALID, and record the
    // input and JPEG streams for later.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream: start tracking it; buffers not yet registered
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /* Clean up streams that were not re-requested: free the channel,
     * the registered buffer-handle array, and the tracking entry. */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            // Releases the handle array copied at registration time --
            // presumably allocated in registerStreamBuffers; TODO confirm.
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    //mMetadataChannel->stop();

    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // First fix up gralloc usage flags per stream direction.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage =
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else
                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    // A bidirectional stream paired with a JPEG stream
                    // enables ZSL; size the channel to the JPEG dimensions.
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework won't*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        // Reset the pending-buffer count for every stream in this config.
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestsList and mPendingBuffersMap */
    mPendingRequestsList.clear();

    /*flush the metadata list*/
    // NOTE(review): erase() already returns the iterator of the next
    // element and the loop's m++ then advances again -- this skips every
    // other stored entry (same pattern as the destructor). Flag for fix.
    if (!mStoredMetadataList.empty()) {
        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
              m != mStoredMetadataList.end(); m++) {
            mMetadataChannel->bufDone(m->meta_buf);
            free(m->meta_buf);
            m = mStoredMetadataList.erase(m);
        }
    }

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mFirstRequest = true;

    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}
675
676/*===========================================================================
677 * FUNCTION   : validateCaptureRequest
678 *
679 * DESCRIPTION: validate a capture request from camera service
680 *
681 * PARAMETERS :
682 *   @request : request from framework to process
683 *
684 * RETURN     :
685 *
686 *==========================================================================*/
687int QCamera3HardwareInterface::validateCaptureRequest(
688                    camera3_capture_request_t *request)
689{
690    ssize_t idx = 0;
691    const camera3_stream_buffer_t *b;
692    CameraMetadata meta;
693
694    /* Sanity check the request */
695    if (request == NULL) {
696        ALOGE("%s: NULL capture request", __func__);
697        return BAD_VALUE;
698    }
699
700    uint32_t frameNumber = request->frame_number;
701    if (request->input_buffer != NULL &&
702            request->input_buffer->stream != mInputStream) {
703        ALOGE("%s: Request %d: Input buffer not from input stream!",
704                __FUNCTION__, frameNumber);
705        return BAD_VALUE;
706    }
707    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
708        ALOGE("%s: Request %d: No output buffers provided!",
709                __FUNCTION__, frameNumber);
710        return BAD_VALUE;
711    }
712    if (request->input_buffer != NULL) {
713        b = request->input_buffer;
714        QCamera3Channel *channel =
715            static_cast<QCamera3Channel*>(b->stream->priv);
716        if (channel == NULL) {
717            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
718                    __func__, frameNumber, idx);
719            return BAD_VALUE;
720        }
721        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
722            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
723                    __func__, frameNumber, idx);
724            return BAD_VALUE;
725        }
726        if (b->release_fence != -1) {
727            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
728                    __func__, frameNumber, idx);
729            return BAD_VALUE;
730        }
731        if (b->buffer == NULL) {
732            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
733                    __func__, frameNumber, idx);
734            return BAD_VALUE;
735        }
736    }
737
738    // Validate all buffers
739    b = request->output_buffers;
740    do {
741        QCamera3Channel *channel =
742                static_cast<QCamera3Channel*>(b->stream->priv);
743        if (channel == NULL) {
744            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
745                    __func__, frameNumber, idx);
746            return BAD_VALUE;
747        }
748        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
749            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
750                    __func__, frameNumber, idx);
751            return BAD_VALUE;
752        }
753        if (b->release_fence != -1) {
754            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
755                    __func__, frameNumber, idx);
756            return BAD_VALUE;
757        }
758        if (b->buffer == NULL) {
759            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
760                    __func__, frameNumber, idx);
761            return BAD_VALUE;
762        }
763        idx++;
764        b = request->output_buffers + idx;
765    } while (idx < (ssize_t)request->num_output_buffers);
766
767    return NO_ERROR;
768}
769
770/*===========================================================================
771 * FUNCTION   : deriveMinFrameDuration
772 *
773 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
774 *              on currently configured streams.
775 *
776 * PARAMETERS : NONE
777 *
778 * RETURN     : NONE
779 *
780 *==========================================================================*/
781void QCamera3HardwareInterface::deriveMinFrameDuration()
782{
783    int32_t maxJpegDimension, maxProcessedDimension;
784
785    maxJpegDimension = 0;
786    maxProcessedDimension = 0;
787
788    // Figure out maximum jpeg, processed, and raw dimensions
789    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
790        it != mStreamInfo.end(); it++) {
791
792        // Input stream doesn't have valid stream_type
793        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
794            continue;
795
796        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
797        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
798            if (dimension > maxJpegDimension)
799                maxJpegDimension = dimension;
800        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
801            if (dimension > maxProcessedDimension)
802                maxProcessedDimension = dimension;
803        }
804    }
805
806    //Assume all jpeg dimensions are in processed dimensions.
807    if (maxJpegDimension > maxProcessedDimension)
808        maxProcessedDimension = maxJpegDimension;
809
810    //Find minimum durations for processed, jpeg, and raw
811    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
812    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
813        if (maxProcessedDimension ==
814            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
815            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
816            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
817            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
818            break;
819        }
820    }
821}
822
823/*===========================================================================
824 * FUNCTION   : getMinFrameDuration
825 *
826 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
827 *              and current request configuration.
828 *
829 * PARAMETERS : @request: requset sent by the frameworks
830 *
831 * RETURN     : min farme duration for a particular request
832 *
833 *==========================================================================*/
834int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
835{
836    bool hasJpegStream = false;
837    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
838        const camera3_stream_t *stream = request->output_buffers[i].stream;
839        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
840            hasJpegStream = true;
841    }
842
843    if (!hasJpegStream)
844        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
845    else
846        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
847}
848
849/*===========================================================================
850 * FUNCTION   : registerStreamBuffers
851 *
852 * DESCRIPTION: Register buffers for a given stream with the HAL device.
853 *
854 * PARAMETERS :
855 *   @stream_list : streams to be configured
856 *
857 * RETURN     :
858 *
859 *==========================================================================*/
860int QCamera3HardwareInterface::registerStreamBuffers(
861        const camera3_stream_buffer_set_t *buffer_set)
862{
863    int rc = 0;
864
865    pthread_mutex_lock(&mMutex);
866
867    if (buffer_set == NULL) {
868        ALOGE("%s: Invalid buffer_set parameter.", __func__);
869        pthread_mutex_unlock(&mMutex);
870        return -EINVAL;
871    }
872    if (buffer_set->stream == NULL) {
873        ALOGE("%s: Invalid stream parameter.", __func__);
874        pthread_mutex_unlock(&mMutex);
875        return -EINVAL;
876    }
877    if (buffer_set->num_buffers < 1) {
878        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
879        pthread_mutex_unlock(&mMutex);
880        return -EINVAL;
881    }
882    if (buffer_set->buffers == NULL) {
883        ALOGE("%s: Invalid buffers parameter.", __func__);
884        pthread_mutex_unlock(&mMutex);
885        return -EINVAL;
886    }
887
888    camera3_stream_t *stream = buffer_set->stream;
889    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
890
891    //set the buffer_set in the mStreamInfo array
892    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
893            it != mStreamInfo.end(); it++) {
894        if ((*it)->stream == stream) {
895            uint32_t numBuffers = buffer_set->num_buffers;
896            (*it)->buffer_set.stream = buffer_set->stream;
897            (*it)->buffer_set.num_buffers = numBuffers;
898            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
899            if ((*it)->buffer_set.buffers == NULL) {
900                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
901                pthread_mutex_unlock(&mMutex);
902                return -ENOMEM;
903            }
904            for (size_t j = 0; j < numBuffers; j++){
905                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
906            }
907            (*it)->registered = 1;
908        }
909    }
910    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
911    if (rc < 0) {
912        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
913        pthread_mutex_unlock(&mMutex);
914        return -ENODEV;
915    }
916
917    pthread_mutex_unlock(&mMutex);
918    return NO_ERROR;
919}
920
921/*===========================================================================
922 * FUNCTION   : processCaptureRequest
923 *
924 * DESCRIPTION: process a capture request from camera service
925 *
926 * PARAMETERS :
927 *   @request : request from framework to process
928 *
929 * RETURN     :
930 *
931 *==========================================================================*/
932int QCamera3HardwareInterface::processCaptureRequest(
933                    camera3_capture_request_t *request)
934{
935    int rc = NO_ERROR;
936    int32_t request_id;
937    CameraMetadata meta;
938    MetadataBufferInfo reproc_meta;
939    int queueMetadata = 0;
940
941    pthread_mutex_lock(&mMutex);
942
943    rc = validateCaptureRequest(request);
944    if (rc != NO_ERROR) {
945        ALOGE("%s: incoming request is not valid", __func__);
946        pthread_mutex_unlock(&mMutex);
947        return rc;
948    }
949
950    meta = request->settings;
951
952    // For first capture request, send capture intent, and
953    // stream on all streams
954    if (mFirstRequest) {
955
956        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
957            int32_t hal_version = CAM_HAL_V3;
958            uint8_t captureIntent =
959                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
960
961            memset(mParameters, 0, sizeof(parm_buffer_t));
962            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
963            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
964                sizeof(hal_version), &hal_version);
965            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
966                sizeof(captureIntent), &captureIntent);
967            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
968                mParameters);
969        }
970
971        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
972            it != mStreamInfo.end(); it++) {
973            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
974            channel->start();
975        }
976    }
977
978    uint32_t frameNumber = request->frame_number;
979    uint32_t streamTypeMask = 0;
980
981    if (meta.exists(ANDROID_REQUEST_ID)) {
982        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
983        mCurrentRequestId = request_id;
984        ALOGV("%s: Received request with id: %d",__func__, request_id);
985    } else if (mFirstRequest || mCurrentRequestId == -1){
986        ALOGE("%s: Unable to find request id field, \
987                & no previous id available", __func__);
988        return NAME_NOT_FOUND;
989    } else {
990        ALOGV("%s: Re-using old request id", __func__);
991        request_id = mCurrentRequestId;
992    }
993
994    ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
995                                    __func__, __LINE__,
996                                    request->num_output_buffers,
997                                    request->input_buffer,
998                                    frameNumber);
999    // Acquire all request buffers first
1000    int blob_request = 0;
1001    for (size_t i = 0; i < request->num_output_buffers; i++) {
1002        const camera3_stream_buffer_t& output = request->output_buffers[i];
1003        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1004        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1005
1006        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1007        //Call function to store local copy of jpeg data for encode params.
1008            blob_request = 1;
1009            rc = getJpegSettings(request->settings);
1010            if (rc < 0) {
1011                ALOGE("%s: failed to get jpeg parameters", __func__);
1012                pthread_mutex_unlock(&mMutex);
1013                return rc;
1014            }
1015        }
1016
1017        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1018        if (rc != OK) {
1019            ALOGE("%s: fence wait failed %d", __func__, rc);
1020            pthread_mutex_unlock(&mMutex);
1021            return rc;
1022        }
1023        streamTypeMask |= channel->getStreamTypeMask();
1024    }
1025
1026    rc = setFrameParameters(request, streamTypeMask);
1027    if (rc < 0) {
1028        ALOGE("%s: fail to set frame parameters", __func__);
1029        pthread_mutex_unlock(&mMutex);
1030        return rc;
1031    }
1032
1033    /* Update pending request list and pending buffers map */
1034    PendingRequestInfo pendingRequest;
1035    pendingRequest.frame_number = frameNumber;
1036    pendingRequest.num_buffers = request->num_output_buffers;
1037    pendingRequest.request_id = request_id;
1038    pendingRequest.blob_request = blob_request;
1039    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1040
1041    for (size_t i = 0; i < request->num_output_buffers; i++) {
1042        RequestedBufferInfo requestedBuf;
1043        requestedBuf.stream = request->output_buffers[i].stream;
1044        requestedBuf.buffer = NULL;
1045        pendingRequest.buffers.push_back(requestedBuf);
1046
1047        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
1048    }
1049    mPendingRequestsList.push_back(pendingRequest);
1050
1051    // Notify metadata channel we receive a request
1052    mMetadataChannel->request(NULL, frameNumber);
1053
1054    // Call request on other streams
1055    for (size_t i = 0; i < request->num_output_buffers; i++) {
1056        const camera3_stream_buffer_t& output = request->output_buffers[i];
1057        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1058        mm_camera_buf_def_t *pInputBuffer = NULL;
1059
1060        if (channel == NULL) {
1061            ALOGE("%s: invalid channel pointer for stream", __func__);
1062            continue;
1063        }
1064
1065        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1066            QCamera3RegularChannel* inputChannel = NULL;
1067            if(request->input_buffer != NULL){
1068                //Try to get the internal format
1069                inputChannel = (QCamera3RegularChannel*)
1070                    request->input_buffer->stream->priv;
1071                if(inputChannel == NULL ){
1072                    ALOGE("%s: failed to get input channel handle", __func__);
1073                } else {
1074                    pInputBuffer =
1075                        inputChannel->getInternalFormatBuffer(
1076                                request->input_buffer->buffer);
1077                    ALOGD("%s: Input buffer dump",__func__);
1078                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1079                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1080                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1081                    //TODO: need to get corresponding metadata and send it to pproc
1082                    for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1083                         m != mStoredMetadataList.end(); m++) {
1084                        if (m->zsl_buf_hdl == request->input_buffer->buffer) {
1085                            reproc_meta.meta_buf = m->meta_buf;
1086                            m = mStoredMetadataList.erase(m);
1087                            queueMetadata = 1;
1088                            break;
1089                        }
1090                    }
1091                }
1092            }
1093            rc = channel->request(output.buffer, frameNumber, mJpegSettings,
1094                            pInputBuffer,(QCamera3Channel*)inputChannel);
1095            if (queueMetadata) {
1096                mPictureChannel->queueMetadata(reproc_meta.meta_buf);
1097            }
1098        } else {
1099            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__,
1100                __LINE__, output.buffer, frameNumber);
1101            if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1102                for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1103                     m != mStoredMetadataList.end(); m++) {
1104                   for (uint32_t j = 0; j < request->num_output_buffers; j++) {
1105                        if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
1106                            mMetadataChannel->bufDone(m->meta_buf);
1107                            free(m->meta_buf);
1108                            m = mStoredMetadataList.erase(m);
1109                            break;
1110                        }
1111                   }
1112                }
1113            }
1114            rc = channel->request(output.buffer, frameNumber);
1115        }
1116        if (rc < 0)
1117            ALOGE("%s: request failed", __func__);
1118    }
1119
1120    mFirstRequest = false;
1121
1122    //Block on conditional variable
1123    mPendingRequest = 1;
1124    while (mPendingRequest == 1) {
1125        pthread_cond_wait(&mRequestCond, &mMutex);
1126    }
1127
1128    pthread_mutex_unlock(&mMutex);
1129    return rc;
1130}
1131
1132/*===========================================================================
1133 * FUNCTION   : getMetadataVendorTagOps
1134 *
1135 * DESCRIPTION:
1136 *
1137 * PARAMETERS :
1138 *
1139 *
1140 * RETURN     :
1141 *==========================================================================*/
1142void QCamera3HardwareInterface::getMetadataVendorTagOps(
1143                    vendor_tag_query_ops_t* /*ops*/)
1144{
1145    /* Enable locks when we eventually add Vendor Tags */
1146    /*
1147    pthread_mutex_lock(&mMutex);
1148
1149    pthread_mutex_unlock(&mMutex);
1150    */
1151    return;
1152}
1153
1154/*===========================================================================
1155 * FUNCTION   : dump
1156 *
1157 * DESCRIPTION:
1158 *
1159 * PARAMETERS :
1160 *
1161 *
1162 * RETURN     :
1163 *==========================================================================*/
1164void QCamera3HardwareInterface::dump(int /*fd*/)
1165{
1166    /*Enable lock when we implement this function*/
1167    /*
1168    pthread_mutex_lock(&mMutex);
1169
1170    pthread_mutex_unlock(&mMutex);
1171    */
1172    return;
1173}
1174
1175/*===========================================================================
1176 * FUNCTION   : flush
1177 *
1178 * DESCRIPTION:
1179 *
1180 * PARAMETERS :
1181 *
1182 *
1183 * RETURN     :
1184 *==========================================================================*/
1185int QCamera3HardwareInterface::flush()
1186{
1187    /*Enable lock when we implement this function*/
1188    /*
1189    pthread_mutex_lock(&mMutex);
1190
1191    pthread_mutex_unlock(&mMutex);
1192    */
1193    return 0;
1194}
1195
1196/*===========================================================================
1197 * FUNCTION   : captureResultCb
1198 *
1199 * DESCRIPTION: Callback handler for all capture result
1200 *              (streams, as well as metadata)
1201 *
1202 * PARAMETERS :
1203 *   @metadata : metadata information
1204 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1205 *               NULL if metadata.
1206 *
1207 * RETURN     : NONE
1208 *==========================================================================*/
1209void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1210                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1211{
1212    pthread_mutex_lock(&mMutex);
1213
1214    if (metadata_buf) {
1215        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1216        int32_t frame_number_valid = *(int32_t *)
1217            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1218        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
1219            CAM_INTF_META_PENDING_REQUESTS, metadata);
1220        uint32_t frame_number = *(uint32_t *)
1221            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1222        const struct timeval *tv = (const struct timeval *)
1223            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1224        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
1225            tv->tv_usec * NSEC_PER_USEC;
1226
1227        if (!frame_number_valid) {
1228            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
1229            mMetadataChannel->bufDone(metadata_buf);
1230            goto done_metadata;
1231        }
1232        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
1233                frame_number, capture_time);
1234
1235        // Go through the pending requests info and send shutter/results to frameworks
1236        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1237                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1238            camera3_capture_result_t result;
1239            camera3_notify_msg_t notify_msg;
1240            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
1241
1242            // Flush out all entries with less or equal frame numbers.
1243
1244            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
1245            //Right now it's the same as metadata timestamp
1246
1247            //TODO: When there is metadata drop, how do we derive the timestamp of
1248            //dropped frames? For now, we fake the dropped timestamp by substracting
1249            //from the reported timestamp
1250            nsecs_t current_capture_time = capture_time -
1251                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
1252
1253            // Send shutter notify to frameworks
1254            notify_msg.type = CAMERA3_MSG_SHUTTER;
1255            notify_msg.message.shutter.frame_number = i->frame_number;
1256            notify_msg.message.shutter.timestamp = current_capture_time;
1257            mCallbackOps->notify(mCallbackOps, &notify_msg);
1258            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
1259                    i->frame_number, capture_time);
1260
1261            // Send empty metadata with already filled buffers for dropped metadata
1262            // and send valid metadata with already filled buffers for current metadata
1263            if (i->frame_number < frame_number) {
1264                CameraMetadata dummyMetadata;
1265                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1266                        &current_capture_time, 1);
1267                dummyMetadata.update(ANDROID_REQUEST_ID,
1268                        &(i->request_id), 1);
1269                result.result = dummyMetadata.release();
1270            } else {
1271                result.result = translateCbMetadataToResultMetadata(metadata,
1272                        current_capture_time, i->request_id);
1273                if (mIsZslMode) {
1274                   int found_metadata = 0;
1275                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
1276                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1277                        j != i->buffers.end(); j++) {
1278                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1279                         //check if corresp. zsl already exists in the stored metadata list
1280                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
1281                               m != mStoredMetadataList.begin(); m++) {
1282                            if (m->frame_number == frame_number) {
1283                               m->meta_buf = metadata_buf;
1284                               found_metadata = 1;
1285                               break;
1286                            }
1287                         }
1288                         if (!found_metadata) {
1289                            MetadataBufferInfo store_meta_info;
1290                            store_meta_info.meta_buf = metadata_buf;
1291                            store_meta_info.frame_number = frame_number;
1292                            mStoredMetadataList.push_back(store_meta_info);
1293                            found_metadata = 1;
1294                         }
1295                      }
1296                   }
1297                   if (!found_metadata) {
1298                       if (!i->input_buffer_present && i->blob_request) {
1299                          //livesnapshot or fallback non-zsl snapshot case
1300                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1301                                j != i->buffers.end(); j++){
1302                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
1303                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1304                                 mPictureChannel->queueMetadata(metadata_buf);
1305                                 break;
1306                              }
1307                         }
1308                       } else {
1309                            //return the metadata immediately
1310                            mMetadataChannel->bufDone(metadata_buf);
1311                            free(metadata_buf);
1312                       }
1313                   }
1314               } else if (!mIsZslMode && i->blob_request) {
1315                   //If it is a blob request then send the metadata to the picture channel
1316                   mPictureChannel->queueMetadata(metadata_buf);
1317               } else {
1318                   // Return metadata buffer
1319                   mMetadataChannel->bufDone(metadata_buf);
1320                   free(metadata_buf);
1321               }
1322
1323            }
1324            if (!result.result) {
1325                ALOGE("%s: metadata is NULL", __func__);
1326            }
1327            result.frame_number = i->frame_number;
1328            result.num_output_buffers = 0;
1329            result.output_buffers = NULL;
1330            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1331                    j != i->buffers.end(); j++) {
1332                if (j->buffer) {
1333                    result.num_output_buffers++;
1334                }
1335            }
1336
1337            if (result.num_output_buffers > 0) {
1338                camera3_stream_buffer_t *result_buffers =
1339                    new camera3_stream_buffer_t[result.num_output_buffers];
1340                if (!result_buffers) {
1341                    ALOGE("%s: Fatal error: out of memory", __func__);
1342                }
1343                size_t result_buffers_idx = 0;
1344                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1345                        j != i->buffers.end(); j++) {
1346                    if (j->buffer) {
1347                        result_buffers[result_buffers_idx++] = *(j->buffer);
1348                        free(j->buffer);
1349                        j->buffer = NULL;
1350                        mPendingBuffersMap.editValueFor(j->stream)--;
1351                    }
1352                }
1353                result.output_buffers = result_buffers;
1354
1355                mCallbackOps->process_capture_result(mCallbackOps, &result);
1356                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1357                        __func__, result.frame_number, current_capture_time);
1358                free_camera_metadata((camera_metadata_t *)result.result);
1359                delete[] result_buffers;
1360            } else {
1361                mCallbackOps->process_capture_result(mCallbackOps, &result);
1362                ALOGE("%s: meta frame_number = %d, capture_time = %lld",
1363                        __func__, result.frame_number, current_capture_time);
1364                free_camera_metadata((camera_metadata_t *)result.result);
1365            }
1366            // erase the element from the list
1367            i = mPendingRequestsList.erase(i);
1368        }
1369
1370
1371done_metadata:
1372        bool max_buffers_dequeued = false;
1373        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1374            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1375            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1376            if (queued_buffers == stream->max_buffers) {
1377                max_buffers_dequeued = true;
1378                break;
1379            }
1380        }
1381        if (!max_buffers_dequeued && !pending_requests) {
1382            // Unblock process_capture_request
1383            mPendingRequest = 0;
1384            pthread_cond_signal(&mRequestCond);
1385        }
1386    } else {
1387        // If the frame number doesn't exist in the pending request list,
1388        // directly send the buffer to the frameworks, and update pending buffers map
1389        // Otherwise, book-keep the buffer.
1390        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1391        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1392            i++;
1393        }
1394        if (i == mPendingRequestsList.end()) {
1395            // Verify all pending requests frame_numbers are greater
1396            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1397                    j != mPendingRequestsList.end(); j++) {
1398                if (j->frame_number < frame_number) {
1399                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1400                            __func__, j->frame_number, frame_number);
1401                }
1402            }
1403            camera3_capture_result_t result;
1404            result.result = NULL;
1405            result.frame_number = frame_number;
1406            result.num_output_buffers = 1;
1407            result.output_buffers = buffer;
1408            ALOGV("%s: result frame_number = %d, buffer = %p",
1409                    __func__, frame_number, buffer);
1410            mPendingBuffersMap.editValueFor(buffer->stream)--;
1411            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1412                int found = 0;
1413                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
1414                      k != mStoredMetadataList.end(); k++) {
1415                    if (k->frame_number == frame_number) {
1416                        k->zsl_buf_hdl = buffer->buffer;
1417                        found = 1;
1418                        break;
1419                    }
1420                }
1421                if (!found) {
1422                   MetadataBufferInfo meta_info;
1423                   meta_info.frame_number = frame_number;
1424                   meta_info.zsl_buf_hdl = buffer->buffer;
1425                   mStoredMetadataList.push_back(meta_info);
1426                }
1427            }
1428            mCallbackOps->process_capture_result(mCallbackOps, &result);
1429        } else {
1430            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1431                    j != i->buffers.end(); j++) {
1432                if (j->stream == buffer->stream) {
1433                    if (j->buffer != NULL) {
1434                        ALOGE("%s: Error: buffer is already set", __func__);
1435                    } else {
1436                        j->buffer = (camera3_stream_buffer_t *)malloc(
1437                                sizeof(camera3_stream_buffer_t));
1438                        *(j->buffer) = *buffer;
1439                        ALOGV("%s: cache buffer %p at result frame_number %d",
1440                                __func__, buffer, frame_number);
1441                    }
1442                }
1443            }
1444        }
1445    }
1446    pthread_mutex_unlock(&mMutex);
1447    return;
1448}
1449
1450/*===========================================================================
1451 * FUNCTION   : translateCbMetadataToResultMetadata
1452 *
1453 * DESCRIPTION:
1454 *
1455 * PARAMETERS :
1456 *   @metadata : metadata information from callback
1457 *
1458 * RETURN     : camera_metadata_t*
1459 *              metadata in a format specified by fwk
1460 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Frame identifiers come from the caller, not from the backend metadata.
    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    // Per-face scratch arrays (VLAs): 4 ints per face for rectangles,
    // 6 per face for landmarks (three x/y coordinate pairs).
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        // weight = -1: face rectangles carry no weight element.
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    // Only publish face tags when at least one face was detected.
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    // From here on: translate each backend (CAM_INTF_*) field into its
    // corresponding framework (ANDROID_*) result tag, one tag at a time.
    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    // Regions are 5-element entries: [x_min, y_min, x_max, y_max, weight].
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);

    uint8_t *ae_state =
            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);

    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    // Crop region stays in (left, top, width, height) form for this tag.
    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    // Also cached in mMetadataResponse — presumably for later reuse
    // (e.g. JPEG exif); TODO confirm against the consumer of that member.
    mMetadataResponse.exposure_time = *sensorExpTime;
    ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

    int64_t  *sensorFameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);

    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
    // ISO is likewise cached alongside exposure time in mMetadataResponse.
    mMetadataResponse.iso_speed = *sensorSensitivity;
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    // Backend face-detect mode is an HAL enum; map it to the framework enum.
    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
        *faceDetectMode);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    // Lens shading map: 4 floats (per Bayer channel) per grid cell,
    // sized by the per-camera capability table.
    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                       (float*)lensShadingMap->lens_shading,
                       4*map_width*map_height);

    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);

    // Color transform is a 3x3 matrix of rationals.
    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);

    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                       predColorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);

    uint8_t *blackLevelLock = (uint8_t*)
        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);

    uint8_t *sceneFlicker = (uint8_t*)
        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);


    // release() hands raw ownership of the packed metadata to the caller,
    // which is responsible for freeing it.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
1708
1709/*===========================================================================
1710 * FUNCTION   : convertToRegions
1711 *
1712 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1713 *
1714 * PARAMETERS :
1715 *   @rect   : cam_rect_t struct to convert
1716 *   @region : int32_t destination array
1717 *   @weight : if we are converting from cam_area_t, weight is valid
1718 *             else weight = -1
1719 *
1720 *==========================================================================*/
1721void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1722    region[0] = rect.left;
1723    region[1] = rect.top;
1724    region[2] = rect.left + rect.width;
1725    region[3] = rect.top + rect.height;
1726    if (weight > -1) {
1727        region[4] = weight;
1728    }
1729}
1730
1731/*===========================================================================
1732 * FUNCTION   : convertFromRegions
1733 *
1734 * DESCRIPTION: helper method to convert from array to cam_rect_t
1735 *
1736 * PARAMETERS :
1737 *   @rect   : cam_rect_t struct to convert
1738 *   @region : int32_t destination array
1739 *   @weight : if we are converting from cam_area_t, weight is valid
1740 *             else weight = -1
1741 *
1742 *==========================================================================*/
1743void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1744                                                   const camera_metadata_t *settings,
1745                                                   uint32_t tag){
1746    CameraMetadata frame_settings;
1747    frame_settings = settings;
1748    int32_t x_min = frame_settings.find(tag).data.i32[0];
1749    int32_t y_min = frame_settings.find(tag).data.i32[1];
1750    int32_t x_max = frame_settings.find(tag).data.i32[2];
1751    int32_t y_max = frame_settings.find(tag).data.i32[3];
1752    roi->weight = frame_settings.find(tag).data.i32[4];
1753    roi->rect.left = x_min;
1754    roi->rect.top = y_min;
1755    roi->rect.width = x_max - x_min;
1756    roi->rect.height = y_max - y_min;
1757}
1758
1759/*===========================================================================
1760 * FUNCTION   : resetIfNeededROI
1761 *
1762 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1763 *              crop region
1764 *
1765 * PARAMETERS :
1766 *   @roi       : cam_area_t struct to resize
1767 *   @scalerCropRegion : cam_crop_region_t region to compare against
1768 *
1769 *
1770 *==========================================================================*/
1771bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1772                                                 const cam_crop_region_t* scalerCropRegion)
1773{
1774    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1775    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1776    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1777    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1778    if ((roi_x_max < scalerCropRegion->left) ||
1779        (roi_y_max < scalerCropRegion->top)  ||
1780        (roi->rect.left > crop_x_max) ||
1781        (roi->rect.top > crop_y_max)){
1782        return false;
1783    }
1784    if (roi->rect.left < scalerCropRegion->left) {
1785        roi->rect.left = scalerCropRegion->left;
1786    }
1787    if (roi->rect.top < scalerCropRegion->top) {
1788        roi->rect.top = scalerCropRegion->top;
1789    }
1790    if (roi_x_max > crop_x_max) {
1791        roi_x_max = crop_x_max;
1792    }
1793    if (roi_y_max > crop_y_max) {
1794        roi_y_max = crop_y_max;
1795    }
1796    roi->rect.width = roi_x_max - roi->rect.left;
1797    roi->rect.height = roi_y_max - roi->rect.top;
1798    return true;
1799}
1800
1801/*===========================================================================
1802 * FUNCTION   : convertLandmarks
1803 *
1804 * DESCRIPTION: helper method to extract the landmarks from face detection info
1805 *
1806 * PARAMETERS :
1807 *   @face   : cam_rect_t struct to convert
1808 *   @landmarks : int32_t destination array
1809 *
1810 *
1811 *==========================================================================*/
1812void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1813{
1814    landmarks[0] = face.left_eye_center.x;
1815    landmarks[1] = face.left_eye_center.y;
1816    landmarks[2] = face.right_eye_center.y;
1817    landmarks[3] = face.right_eye_center.y;
1818    landmarks[4] = face.mouth_center.x;
1819    landmarks[5] = face.mouth_center.y;
1820}
1821
1822#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1823/*===========================================================================
1824 * FUNCTION   : initCapabilities
1825 *
1826 * DESCRIPTION: initialize camera capabilities in static data struct
1827 *
1828 * PARAMETERS :
1829 *   @cameraId  : camera Id
1830 *
1831 * RETURN     : int32_t type of status
1832 *              NO_ERROR  -- success
1833 *              none-zero failure code
1834 *==========================================================================*/
1835int QCamera3HardwareInterface::initCapabilities(int cameraId)
1836{
1837    int rc = 0;
1838    mm_camera_vtbl_t *cameraHandle = NULL;
1839    QCamera3HeapMemory *capabilityHeap = NULL;
1840
1841    cameraHandle = camera_open(cameraId);
1842    if (!cameraHandle) {
1843        ALOGE("%s: camera_open failed", __func__);
1844        rc = -1;
1845        goto open_failed;
1846    }
1847
1848    capabilityHeap = new QCamera3HeapMemory();
1849    if (capabilityHeap == NULL) {
1850        ALOGE("%s: creation of capabilityHeap failed", __func__);
1851        goto heap_creation_failed;
1852    }
1853    /* Allocate memory for capability buffer */
1854    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1855    if(rc != OK) {
1856        ALOGE("%s: No memory for cappability", __func__);
1857        goto allocate_failed;
1858    }
1859
1860    /* Map memory for capability buffer */
1861    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1862    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1863                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1864                                capabilityHeap->getFd(0),
1865                                sizeof(cam_capability_t));
1866    if(rc < 0) {
1867        ALOGE("%s: failed to map capability buffer", __func__);
1868        goto map_failed;
1869    }
1870
1871    /* Query Capability */
1872    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1873    if(rc < 0) {
1874        ALOGE("%s: failed to query capability",__func__);
1875        goto query_failed;
1876    }
1877    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1878    if (!gCamCapability[cameraId]) {
1879        ALOGE("%s: out of memory", __func__);
1880        goto query_failed;
1881    }
1882    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1883                                        sizeof(cam_capability_t));
1884    rc = 0;
1885
1886query_failed:
1887    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1888                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1889map_failed:
1890    capabilityHeap->deallocate();
1891allocate_failed:
1892    delete capabilityHeap;
1893heap_creation_failed:
1894    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1895    cameraHandle = NULL;
1896open_failed:
1897    return rc;
1898}
1899
1900/*===========================================================================
1901 * FUNCTION   : initParameters
1902 *
1903 * DESCRIPTION: initialize camera parameters
1904 *
1905 * PARAMETERS :
1906 *
1907 * RETURN     : int32_t type of status
1908 *              NO_ERROR  -- success
1909 *              none-zero failure code
1910 *==========================================================================*/
1911int QCamera3HardwareInterface::initParameters()
1912{
1913    int rc = 0;
1914
1915    //Allocate Set Param Buffer
1916    mParamHeap = new QCamera3HeapMemory();
1917    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1918    if(rc != OK) {
1919        rc = NO_MEMORY;
1920        ALOGE("Failed to allocate SETPARM Heap memory");
1921        delete mParamHeap;
1922        mParamHeap = NULL;
1923        return rc;
1924    }
1925
1926    //Map memory for parameters buffer
1927    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1928            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1929            mParamHeap->getFd(0),
1930            sizeof(parm_buffer_t));
1931    if(rc < 0) {
1932        ALOGE("%s:failed to map SETPARM buffer",__func__);
1933        rc = FAILED_TRANSACTION;
1934        mParamHeap->deallocate();
1935        delete mParamHeap;
1936        mParamHeap = NULL;
1937        return rc;
1938    }
1939
1940    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1941    return rc;
1942}
1943
1944/*===========================================================================
1945 * FUNCTION   : deinitParameters
1946 *
1947 * DESCRIPTION: de-initialize camera parameters
1948 *
1949 * PARAMETERS :
1950 *
1951 * RETURN     : NONE
1952 *==========================================================================*/
1953void QCamera3HardwareInterface::deinitParameters()
1954{
1955    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1956            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1957
1958    mParamHeap->deallocate();
1959    delete mParamHeap;
1960    mParamHeap = NULL;
1961
1962    mParameters = NULL;
1963}
1964
1965/*===========================================================================
1966 * FUNCTION   : calcMaxJpegSize
1967 *
1968 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1969 *
1970 * PARAMETERS :
1971 *
1972 * RETURN     : max_jpeg_size
1973 *==========================================================================*/
1974int QCamera3HardwareInterface::calcMaxJpegSize()
1975{
1976    int32_t max_jpeg_size = 0;
1977    int temp_width, temp_height;
1978    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1979        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1980        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1981        if (temp_width * temp_height > max_jpeg_size ) {
1982            max_jpeg_size = temp_width * temp_height;
1983        }
1984    }
1985    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1986    return max_jpeg_size;
1987}
1988
1989/*===========================================================================
1990 * FUNCTION   : initStaticMetadata
1991 *
1992 * DESCRIPTION: initialize the static metadata
1993 *
1994 * PARAMETERS :
1995 *   @cameraId  : camera Id
1996 *
1997 * RETURN     : int32_t type of status
1998 *              0  -- success
1999 *              non-zero failure code
2000 *==========================================================================*/
2001int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
2002{
2003    int rc = 0;
2004    CameraMetadata staticInfo;
2005
2006    /* android.info: hardware level */
2007    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
2008    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2009        &supportedHardwareLevel, 1);
2010
2011    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
2012    /*HAL 3 only*/
2013    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2014                    &gCamCapability[cameraId]->min_focus_distance, 1); */
2015
2016    /*hard coded for now but this should come from sensor*/
2017    float min_focus_distance;
2018    if(facingBack){
2019        min_focus_distance = 10;
2020    } else {
2021        min_focus_distance = 0;
2022    }
2023    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
2024                    &min_focus_distance, 1);
2025
2026    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
2027                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
2028
2029    /*should be using focal lengths but sensor doesn't provide that info now*/
2030    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
2031                      &gCamCapability[cameraId]->focal_length,
2032                      1);
2033
2034    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
2035                      gCamCapability[cameraId]->apertures,
2036                      gCamCapability[cameraId]->apertures_count);
2037
2038    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
2039                gCamCapability[cameraId]->filter_densities,
2040                gCamCapability[cameraId]->filter_densities_count);
2041
2042
2043    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
2044                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
2045                      gCamCapability[cameraId]->optical_stab_modes_count);
2046
2047    staticInfo.update(ANDROID_LENS_POSITION,
2048                      gCamCapability[cameraId]->lens_position,
2049                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
2050
2051    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
2052                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
2053    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
2054                      lens_shading_map_size,
2055                      sizeof(lens_shading_map_size)/sizeof(int32_t));
2056
2057    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
2058                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
2059    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
2060            geo_correction_map_size,
2061            sizeof(geo_correction_map_size)/sizeof(int32_t));
2062
2063    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
2064                       gCamCapability[cameraId]->geo_correction_map,
2065                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
2066
2067    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
2068            gCamCapability[cameraId]->sensor_physical_size, 2);
2069
2070    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
2071            gCamCapability[cameraId]->exposure_time_range, 2);
2072
2073    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
2074            &gCamCapability[cameraId]->max_frame_duration, 1);
2075
2076
2077    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
2078                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
2079
2080    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
2081                                               gCamCapability[cameraId]->pixel_array_size.height};
2082    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
2083                      pixel_array_size, 2);
2084
2085    int32_t active_array_size[] = {0, 0,
2086                                                gCamCapability[cameraId]->active_array_size.width,
2087                                                gCamCapability[cameraId]->active_array_size.height};
2088    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
2089                      active_array_size, 4);
2090
2091    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
2092            &gCamCapability[cameraId]->white_level, 1);
2093
2094    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2095            gCamCapability[cameraId]->black_level_pattern, 4);
2096
2097    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
2098                      &gCamCapability[cameraId]->flash_charge_duration, 1);
2099
2100    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
2101                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
2102
2103    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
2104                      (int*)&gCamCapability[cameraId]->max_num_roi, 1);
2105
2106    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
2107                      &gCamCapability[cameraId]->histogram_size, 1);
2108
2109    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
2110            &gCamCapability[cameraId]->max_histogram_count, 1);
2111
2112    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
2113                                                gCamCapability[cameraId]->sharpness_map_size.height};
2114
2115    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
2116            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
2117
2118    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
2119            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
2120
2121
2122    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2123                      &gCamCapability[cameraId]->raw_min_duration,
2124                       1);
2125
2126    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
2127                                                HAL_PIXEL_FORMAT_BLOB};
2128    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
2129    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
2130                      scalar_formats,
2131                      scalar_formats_count);
2132
2133    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
2134    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
2135              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
2136              available_processed_sizes);
2137    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2138                available_processed_sizes,
2139                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
2140
2141    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2142                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2143                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2144
2145    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
2146    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
2147                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
2148                 available_fps_ranges);
2149    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2150            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
2151
2152    camera_metadata_rational exposureCompensationStep = {
2153            gCamCapability[cameraId]->exp_compensation_step.numerator,
2154            gCamCapability[cameraId]->exp_compensation_step.denominator};
2155    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2156                      &exposureCompensationStep, 1);
2157
2158    /*TO DO*/
2159    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
2160    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2161                      availableVstabModes, sizeof(availableVstabModes));
2162
2163    /*HAL 1 and HAL 3 common*/
2164    float maxZoom = 4;
2165    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2166            &maxZoom, 1);
2167
2168    int32_t max3aRegions = 1;
2169    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
2170            &max3aRegions, 1);
2171
2172    uint8_t availableFaceDetectModes[] = {
2173            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
2174            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
2175    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
2176                      availableFaceDetectModes,
2177                      sizeof(availableFaceDetectModes));
2178
2179    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
2180                                       gCamCapability[cameraId]->raw_dim.height};
2181    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2182                      raw_size,
2183                      sizeof(raw_size)/sizeof(uint32_t));
2184
2185    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
2186                                                        gCamCapability[cameraId]->exposure_compensation_max};
2187    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2188            exposureCompensationRange,
2189            sizeof(exposureCompensationRange)/sizeof(int32_t));
2190
2191    uint8_t lensFacing = (facingBack) ?
2192            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2193    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
2194
2195    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2196                available_processed_sizes,
2197                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
2198
2199    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2200                      available_thumbnail_sizes,
2201                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
2202
2203    int32_t max_jpeg_size = 0;
2204    int temp_width, temp_height;
2205    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
2206        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
2207        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
2208        if (temp_width * temp_height > max_jpeg_size ) {
2209            max_jpeg_size = temp_width * temp_height;
2210        }
2211    }
2212    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
2213    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
2214                      &max_jpeg_size, 1);
2215
2216    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
2217    int32_t size = 0;
2218    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
2219        int val = lookupFwkName(EFFECT_MODES_MAP,
2220                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2221                                   gCamCapability[cameraId]->supported_effects[i]);
2222        if (val != NAME_NOT_FOUND) {
2223            avail_effects[size] = (uint8_t)val;
2224            size++;
2225        }
2226    }
2227    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2228                      avail_effects,
2229                      size);
2230
2231    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
2232    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
2233    int32_t supported_scene_modes_cnt = 0;
2234    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
2235        int val = lookupFwkName(SCENE_MODES_MAP,
2236                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2237                                gCamCapability[cameraId]->supported_scene_modes[i]);
2238        if (val != NAME_NOT_FOUND) {
2239            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
2240            supported_indexes[supported_scene_modes_cnt] = i;
2241            supported_scene_modes_cnt++;
2242        }
2243    }
2244
2245    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2246                      avail_scene_modes,
2247                      supported_scene_modes_cnt);
2248
2249    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
2250    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
2251                      supported_scene_modes_cnt,
2252                      scene_mode_overrides,
2253                      supported_indexes,
2254                      cameraId);
2255    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
2256                      scene_mode_overrides,
2257                      supported_scene_modes_cnt*3);
2258
2259    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
2260    size = 0;
2261    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
2262        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
2263                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2264                                 gCamCapability[cameraId]->supported_antibandings[i]);
2265        if (val != NAME_NOT_FOUND) {
2266            avail_antibanding_modes[size] = (uint8_t)val;
2267            size++;
2268        }
2269
2270    }
2271    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2272                      avail_antibanding_modes,
2273                      size);
2274
2275    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
2276    size = 0;
2277    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
2278        int val = lookupFwkName(FOCUS_MODES_MAP,
2279                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2280                                gCamCapability[cameraId]->supported_focus_modes[i]);
2281        if (val != NAME_NOT_FOUND) {
2282            avail_af_modes[size] = (uint8_t)val;
2283            size++;
2284        }
2285    }
2286    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2287                      avail_af_modes,
2288                      size);
2289
2290    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
2291    size = 0;
2292    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
2293        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
2294                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2295                                    gCamCapability[cameraId]->supported_white_balances[i]);
2296        if (val != NAME_NOT_FOUND) {
2297            avail_awb_modes[size] = (uint8_t)val;
2298            size++;
2299        }
2300    }
2301    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2302                      avail_awb_modes,
2303                      size);
2304
2305    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
2306    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
2307      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
2308
2309    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
2310            available_flash_levels,
2311            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
2312
2313
2314    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
2315    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
2316            &flashAvailable, 1);
2317
2318    uint8_t avail_ae_modes[5];
2319    size = 0;
2320    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
2321        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
2322        size++;
2323    }
2324    if (flashAvailable) {
2325        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
2326        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
2327        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
2328    }
2329    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2330                      avail_ae_modes,
2331                      size);
2332
2333    int32_t sensitivity_range[2];
2334    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
2335    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
2336    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
2337                      sensitivity_range,
2338                      sizeof(sensitivity_range) / sizeof(int32_t));
2339
2340    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
2341                      &gCamCapability[cameraId]->max_analog_sensitivity,
2342                      1);
2343
2344    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2345                      &gCamCapability[cameraId]->jpeg_min_duration[0],
2346                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);
2347
2348    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
2349    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
2350                      &sensor_orientation,
2351                      1);
2352
2353    int32_t max_output_streams[3] = {1, 3, 1};
2354    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
2355                      max_output_streams,
2356                      3);
2357
2358    gStaticMetadata[cameraId] = staticInfo.release();
2359    return rc;
2360}
2361
2362/*===========================================================================
2363 * FUNCTION   : makeTable
2364 *
2365 * DESCRIPTION: make a table of sizes
2366 *
2367 * PARAMETERS :
2368 *
2369 *
2370 *==========================================================================*/
2371void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2372                                          int32_t* sizeTable)
2373{
2374    int j = 0;
2375    for (int i = 0; i < size; i++) {
2376        sizeTable[j] = dimTable[i].width;
2377        sizeTable[j+1] = dimTable[i].height;
2378        j+=2;
2379    }
2380}
2381
2382/*===========================================================================
2383 * FUNCTION   : makeFPSTable
2384 *
2385 * DESCRIPTION: make a table of fps ranges
2386 *
2387 * PARAMETERS :
2388 *
2389 *==========================================================================*/
2390void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2391                                          int32_t* fpsRangesTable)
2392{
2393    int j = 0;
2394    for (int i = 0; i < size; i++) {
2395        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2396        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2397        j+=2;
2398    }
2399}
2400
2401/*===========================================================================
2402 * FUNCTION   : makeOverridesList
2403 *
2404 * DESCRIPTION: make a list of scene mode overrides
2405 *
2406 * PARAMETERS :
2407 *
2408 *
2409 *==========================================================================*/
2410void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2411                                                  uint8_t size, uint8_t* overridesList,
2412                                                  uint8_t* supported_indexes,
2413                                                  int camera_id)
2414{
2415    /*daemon will give a list of overrides for all scene modes.
2416      However we should send the fwk only the overrides for the scene modes
2417      supported by the framework*/
2418    int j = 0, index = 0, supt = 0;
2419    uint8_t focus_override;
2420    for (int i = 0; i < size; i++) {
2421        supt = 0;
2422        index = supported_indexes[i];
2423        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2424        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2425                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2426                                                    overridesTable[index].awb_mode);
2427        focus_override = (uint8_t)overridesTable[index].af_mode;
2428        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2429           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2430              supt = 1;
2431              break;
2432           }
2433        }
2434        if (supt) {
2435           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2436                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2437                                              focus_override);
2438        } else {
2439           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2440        }
2441        j+=3;
2442    }
2443}
2444
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the format from backend to a HAL pixel format
 *              recognized by the framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
 *
 *==========================================================================*/
2455int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2456{
2457    int32_t halPixelFormat;
2458
2459    switch (format) {
2460    case CAM_FORMAT_YUV_420_NV12:
2461        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2462        break;
2463    case CAM_FORMAT_YUV_420_NV21:
2464        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2465        break;
2466    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2467        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2468        break;
2469    case CAM_FORMAT_YUV_420_YV12:
2470        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2471        break;
2472    case CAM_FORMAT_YUV_422_NV16:
2473    case CAM_FORMAT_YUV_422_NV61:
2474    default:
2475        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2476        break;
2477    }
2478    return halPixelFormat;
2479}
2480
/*===========================================================================
 * FUNCTION   : getSensorSensitivity
 *
 * DESCRIPTION: convert iso_mode to an integer value
 *
 * PARAMETERS : iso_mode : the iso_mode supported by sensor
 *
 * RETURN     : sensitivity supported by sensor
 *
 *==========================================================================*/
2491int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2492{
2493    int32_t sensitivity;
2494
2495    switch (iso_mode) {
2496    case CAM_ISO_MODE_100:
2497        sensitivity = 100;
2498        break;
2499    case CAM_ISO_MODE_200:
2500        sensitivity = 200;
2501        break;
2502    case CAM_ISO_MODE_400:
2503        sensitivity = 400;
2504        break;
2505    case CAM_ISO_MODE_800:
2506        sensitivity = 800;
2507        break;
2508    case CAM_ISO_MODE_1600:
2509        sensitivity = 1600;
2510        break;
2511    default:
2512        sensitivity = -1;
2513        break;
2514    }
2515    return sensitivity;
2516}
2517
2518
2519/*===========================================================================
2520 * FUNCTION   : AddSetParmEntryToBatch
2521 *
2522 * DESCRIPTION: add set parameter entry into batch
2523 *
2524 * PARAMETERS :
2525 *   @p_table     : ptr to parameter buffer
2526 *   @paramType   : parameter type
2527 *   @paramLength : length of parameter value
2528 *   @paramValue  : ptr to parameter value
2529 *
2530 * RETURN     : int32_t type of status
2531 *              NO_ERROR  -- success
2532 *              none-zero failure code
2533 *==========================================================================*/
2534int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2535                                                          cam_intf_parm_type_t paramType,
2536                                                          uint32_t paramLength,
2537                                                          void *paramValue)
2538{
2539    int position = paramType;
2540    int current, next;
2541
2542    /*************************************************************************
2543    *                 Code to take care of linking next flags                *
2544    *************************************************************************/
2545    current = GET_FIRST_PARAM_ID(p_table);
2546    if (position == current){
2547        //DO NOTHING
2548    } else if (position < current){
2549        SET_NEXT_PARAM_ID(position, p_table, current);
2550        SET_FIRST_PARAM_ID(p_table, position);
2551    } else {
2552        /* Search for the position in the linked list where we need to slot in*/
2553        while (position > GET_NEXT_PARAM_ID(current, p_table))
2554            current = GET_NEXT_PARAM_ID(current, p_table);
2555
2556        /*If node already exists no need to alter linking*/
2557        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2558            next = GET_NEXT_PARAM_ID(current, p_table);
2559            SET_NEXT_PARAM_ID(current, p_table, position);
2560            SET_NEXT_PARAM_ID(position, p_table, next);
2561        }
2562    }
2563
2564    /*************************************************************************
2565    *                   Copy contents into entry                             *
2566    *************************************************************************/
2567
2568    if (paramLength > sizeof(parm_type_t)) {
2569        ALOGE("%s:Size of input larger than max entry size",__func__);
2570        return BAD_VALUE;
2571    }
2572    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2573    return NO_ERROR;
2574}
2575
2576/*===========================================================================
2577 * FUNCTION   : lookupFwkName
2578 *
2579 * DESCRIPTION: In case the enum is not same in fwk and backend
2580 *              make sure the parameter is correctly propogated
2581 *
2582 * PARAMETERS  :
2583 *   @arr      : map between the two enums
2584 *   @len      : len of the map
2585 *   @hal_name : name of the hal_parm to map
2586 *
2587 * RETURN     : int type of status
2588 *              fwk_name  -- success
2589 *              none-zero failure code
2590 *==========================================================================*/
2591int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2592                                             int len, int hal_name)
2593{
2594
2595    for (int i = 0; i < len; i++) {
2596        if (arr[i].hal_name == hal_name)
2597            return arr[i].fwk_name;
2598    }
2599
2600    /* Not able to find matching framework type is not necessarily
2601     * an error case. This happens when mm-camera supports more attributes
2602     * than the frameworks do */
2603    ALOGD("%s: Cannot find matching framework type", __func__);
2604    return NAME_NOT_FOUND;
2605}
2606
2607/*===========================================================================
2608 * FUNCTION   : lookupHalName
2609 *
2610 * DESCRIPTION: In case the enum is not same in fwk and backend
2611 *              make sure the parameter is correctly propogated
2612 *
2613 * PARAMETERS  :
2614 *   @arr      : map between the two enums
2615 *   @len      : len of the map
2616 *   @fwk_name : name of the hal_parm to map
2617 *
2618 * RETURN     : int32_t type of status
2619 *              hal_name  -- success
2620 *              none-zero failure code
2621 *==========================================================================*/
2622int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2623                                             int len, int fwk_name)
2624{
2625    for (int i = 0; i < len; i++) {
2626       if (arr[i].fwk_name == fwk_name)
2627           return arr[i].hal_name;
2628    }
2629    ALOGE("%s: Cannot find matching hal type", __func__);
2630    return NAME_NOT_FOUND;
2631}
2632
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
2646int QCamera3HardwareInterface::getCamInfo(int cameraId,
2647                                    struct camera_info *info)
2648{
2649    int rc = 0;
2650
2651    if (NULL == gCamCapability[cameraId]) {
2652        rc = initCapabilities(cameraId);
2653        if (rc < 0) {
2654            //pthread_mutex_unlock(&g_camlock);
2655            return rc;
2656        }
2657    }
2658
2659    if (NULL == gStaticMetadata[cameraId]) {
2660        rc = initStaticMetadata(cameraId);
2661        if (rc < 0) {
2662            return rc;
2663        }
2664    }
2665
2666    switch(gCamCapability[cameraId]->position) {
2667    case CAM_POSITION_BACK:
2668        info->facing = CAMERA_FACING_BACK;
2669        break;
2670
2671    case CAM_POSITION_FRONT:
2672        info->facing = CAMERA_FACING_FRONT;
2673        break;
2674
2675    default:
2676        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2677        rc = -1;
2678        break;
2679    }
2680
2681
2682    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2683    info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
2684    info->static_camera_characteristics = gStaticMetadata[cameraId];
2685
2686    return rc;
2687}
2688
/*===========================================================================
 * FUNCTION   : translateCapabilityToMetadata
 *
 * DESCRIPTION: translate the capability into default camera_metadata_t
 *              settings for the given request template
 *
 * PARAMETERS : type : type of the request template
 *
 * RETURN     : success: camera_metadata_t*
 *              failure: NULL
 *
 *==========================================================================*/
2701camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2702{
2703    pthread_mutex_lock(&mMutex);
2704
2705    if (mDefaultMetadata[type] != NULL) {
2706        pthread_mutex_unlock(&mMutex);
2707        return mDefaultMetadata[type];
2708    }
2709    //first time we are handling this request
2710    //fill up the metadata structure using the wrapper class
2711    CameraMetadata settings;
2712    //translate from cam_capability_t to camera_metadata_tag_t
2713    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2714    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2715
2716    /*control*/
2717
2718    uint8_t controlIntent = 0;
2719    switch (type) {
2720      case CAMERA3_TEMPLATE_PREVIEW:
2721        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2722        break;
2723      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2724        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2725        break;
2726      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2727        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2728        break;
2729      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2730        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2731        break;
2732      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2733        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2734        break;
2735      default:
2736        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2737        break;
2738    }
2739    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2740
2741    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2742            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2743
2744    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2745    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2746
2747    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2748    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2749
2750    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2751    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2752
2753    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2754    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2755
2756    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2757    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2758
2759    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2760    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2761
2762    static uint8_t focusMode;
2763    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2764        ALOGE("%s: Setting focus mode to auto", __func__);
2765        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2766    } else {
2767        ALOGE("%s: Setting focus mode to off", __func__);
2768        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2769    }
2770    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2771
2772    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2773    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2774
2775    /*flash*/
2776    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2777    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2778
2779    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2780    settings.update(ANDROID_FLASH_FIRING_POWER,
2781            &flashFiringLevel, 1);
2782
2783    /* lens */
2784    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2785    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2786
2787    if (gCamCapability[mCameraId]->filter_densities_count) {
2788        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2789        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2790                        gCamCapability[mCameraId]->filter_densities_count);
2791    }
2792
2793    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2794    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2795
2796    /* frame duration */
2797    int64_t default_frame_duration = NSEC_PER_33MSEC;
2798    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
2799
2800    /* sensitivity */
2801    int32_t default_sensitivity = 100;
2802    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2803
2804    mDefaultMetadata[type] = settings.release();
2805
2806    pthread_mutex_unlock(&mMutex);
2807    return mDefaultMetadata[type];
2808}
2809
2810/*===========================================================================
2811 * FUNCTION   : setFrameParameters
2812 *
2813 * DESCRIPTION: set parameters per frame as requested in the metadata from
2814 *              framework
2815 *
2816 * PARAMETERS :
2817 *   @request   : request that needs to be serviced
2818 *   @streamTypeMask : bit mask of stream types on which buffers are requested
2819 *
2820 * RETURN     : success: NO_ERROR
2821 *              failure:
2822 *==========================================================================*/
2823int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
2824                    uint32_t streamTypeMask)
2825{
2826    /*translate from camera_metadata_t type to parm_type_t*/
2827    int rc = 0;
2828    if (request->settings == NULL && mFirstRequest) {
2829        /*settings cannot be null for the first request*/
2830        return BAD_VALUE;
2831    }
2832
2833    int32_t hal_version = CAM_HAL_V3;
2834
2835    memset(mParameters, 0, sizeof(parm_buffer_t));
2836    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2837    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2838                sizeof(hal_version), &hal_version);
2839    if (rc < 0) {
2840        ALOGE("%s: Failed to set hal version in the parameters", __func__);
2841        return BAD_VALUE;
2842    }
2843
2844    /*we need to update the frame number in the parameters*/
2845    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2846                                sizeof(request->frame_number), &(request->frame_number));
2847    if (rc < 0) {
2848        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2849        return BAD_VALUE;
2850    }
2851
2852    /* Update stream id mask where buffers are requested */
2853    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2854                                sizeof(streamTypeMask), &streamTypeMask);
2855    if (rc < 0) {
2856        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2857        return BAD_VALUE;
2858    }
2859
2860    if(request->settings != NULL){
2861        rc = translateMetadataToParameters(request);
2862    }
2863    /*set the parameters to backend*/
2864    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2865    return rc;
2866}
2867
2868/*===========================================================================
2869 * FUNCTION   : translateMetadataToParameters
2870 *
2871 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2872 *
2873 *
2874 * PARAMETERS :
2875 *   @request  : request sent from framework
2876 *
2877 *
2878 * RETURN     : success: NO_ERROR
2879 *              failure:
2880 *==========================================================================*/
2881int QCamera3HardwareInterface::translateMetadataToParameters
2882                                  (const camera3_capture_request_t *request)
2883{
2884    int rc = 0;
2885    CameraMetadata frame_settings;
2886    frame_settings = request->settings;
2887
2888    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2889        int32_t antibandingMode =
2890            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2891        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2892                sizeof(antibandingMode), &antibandingMode);
2893    }
2894
2895    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2896        int32_t expCompensation = frame_settings.find(
2897            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2898        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2899            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2900        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2901            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2902        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2903          sizeof(expCompensation), &expCompensation);
2904    }
2905
2906    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2907        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2908        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2909                sizeof(aeLock), &aeLock);
2910    }
2911    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2912        cam_fps_range_t fps_range;
2913        fps_range.min_fps =
2914            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2915        fps_range.max_fps =
2916            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2917        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2918                sizeof(fps_range), &fps_range);
2919    }
2920
2921    float focalDistance = -1.0;
2922    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2923        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2924        rc = AddSetParmEntryToBatch(mParameters,
2925                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2926                sizeof(focalDistance), &focalDistance);
2927    }
2928
2929    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2930        uint8_t fwk_focusMode =
2931            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2932        uint8_t focusMode;
2933        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2934            focusMode = CAM_FOCUS_MODE_INFINITY;
2935        } else{
2936         focusMode = lookupHalName(FOCUS_MODES_MAP,
2937                                   sizeof(FOCUS_MODES_MAP),
2938                                   fwk_focusMode);
2939        }
2940        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2941                sizeof(focusMode), &focusMode);
2942    }
2943
2944    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2945        uint8_t awbLock =
2946            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2947        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2948                sizeof(awbLock), &awbLock);
2949    }
2950
2951    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2952        uint8_t fwk_whiteLevel =
2953            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2954        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2955                sizeof(WHITE_BALANCE_MODES_MAP),
2956                fwk_whiteLevel);
2957        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2958                sizeof(whiteLevel), &whiteLevel);
2959    }
2960
2961    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2962        uint8_t fwk_effectMode =
2963            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2964        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2965                sizeof(EFFECT_MODES_MAP),
2966                fwk_effectMode);
2967        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2968                sizeof(effectMode), &effectMode);
2969    }
2970
2971    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2972        uint8_t fwk_aeMode =
2973            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2974        uint8_t aeMode;
2975        int32_t redeye;
2976
2977        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2978            aeMode = CAM_AE_MODE_OFF;
2979        } else {
2980            aeMode = CAM_AE_MODE_ON;
2981        }
2982        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2983            redeye = 1;
2984        } else {
2985            redeye = 0;
2986        }
2987
2988        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2989                                          sizeof(AE_FLASH_MODE_MAP),
2990                                          fwk_aeMode);
2991        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2992                sizeof(aeMode), &aeMode);
2993        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2994                sizeof(flashMode), &flashMode);
2995        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2996                sizeof(redeye), &redeye);
2997    }
2998
2999    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
3000        uint8_t colorCorrectMode =
3001            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
3002        rc =
3003            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
3004                    sizeof(colorCorrectMode), &colorCorrectMode);
3005    }
3006
3007    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
3008        cam_color_correct_gains_t colorCorrectGains;
3009        for (int i = 0; i < 4; i++) {
3010            colorCorrectGains.gains[i] =
3011                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
3012        }
3013        rc =
3014            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
3015                    sizeof(colorCorrectGains), &colorCorrectGains);
3016    }
3017
3018    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
3019        cam_color_correct_matrix_t colorCorrectTransform;
3020        cam_rational_type_t transform_elem;
3021        int num = 0;
3022        for (int i = 0; i < 3; i++) {
3023           for (int j = 0; j < 3; j++) {
3024              transform_elem.numerator =
3025                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
3026              transform_elem.denominator =
3027                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
3028              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
3029              num++;
3030           }
3031        }
3032        rc =
3033            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
3034                    sizeof(colorCorrectTransform), &colorCorrectTransform);
3035    }
3036
3037    cam_trigger_t aecTrigger;
3038    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
3039    aecTrigger.trigger_id = -1;
3040    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
3041        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
3042        aecTrigger.trigger =
3043            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
3044        aecTrigger.trigger_id =
3045            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
3046    }
3047    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
3048                                sizeof(aecTrigger), &aecTrigger);
3049
3050    /*af_trigger must come with a trigger id*/
3051    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
3052        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
3053        cam_trigger_t af_trigger;
3054        af_trigger.trigger =
3055            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
3056        af_trigger.trigger_id =
3057            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
3058        rc = AddSetParmEntryToBatch(mParameters,
3059                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
3060    }
3061
3062    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3063        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3064        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
3065                sizeof(metaMode), &metaMode);
3066        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3067           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3068           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
3069                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3070                                             fwk_sceneMode);
3071           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3072                sizeof(sceneMode), &sceneMode);
3073        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
3074           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3075           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3076                sizeof(sceneMode), &sceneMode);
3077        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
3078           uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
3079           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
3080                sizeof(sceneMode), &sceneMode);
3081        }
3082    }
3083
3084    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
3085        int32_t demosaic =
3086            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
3087        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
3088                sizeof(demosaic), &demosaic);
3089    }
3090
3091    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
3092        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
3093        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
3094                sizeof(edgeMode), &edgeMode);
3095    }
3096
3097    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
3098        int32_t edgeStrength =
3099            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
3100        rc = AddSetParmEntryToBatch(mParameters,
3101                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
3102    }
3103
3104    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
3105        int32_t respectFlashMode = 1;
3106        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
3107            uint8_t fwk_aeMode =
3108                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
3109            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
3110                respectFlashMode = 0;
3111                ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
3112                    __func__);
3113            }
3114        }
3115        if (respectFlashMode) {
3116            uint8_t flashMode =
3117                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3118            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
3119                                          sizeof(FLASH_MODES_MAP),
3120                                          flashMode);
3121            ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
3122            // To check: CAM_INTF_META_FLASH_MODE usage
3123            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
3124                          sizeof(flashMode), &flashMode);
3125        }
3126    }
3127
3128    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
3129        uint8_t flashPower =
3130            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
3131        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
3132                sizeof(flashPower), &flashPower);
3133    }
3134
3135    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
3136        int64_t flashFiringTime =
3137            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
3138        rc = AddSetParmEntryToBatch(mParameters,
3139                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
3140    }
3141
3142    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
3143        uint8_t geometricMode =
3144            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
3145        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
3146                sizeof(geometricMode), &geometricMode);
3147    }
3148
3149    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
3150        uint8_t geometricStrength =
3151            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
3152        rc = AddSetParmEntryToBatch(mParameters,
3153                CAM_INTF_META_GEOMETRIC_STRENGTH,
3154                sizeof(geometricStrength), &geometricStrength);
3155    }
3156
3157    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
3158        uint8_t hotPixelMode =
3159            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
3160        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
3161                sizeof(hotPixelMode), &hotPixelMode);
3162    }
3163
3164    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
3165        float lensAperture =
3166            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
3167        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
3168                sizeof(lensAperture), &lensAperture);
3169    }
3170
3171    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
3172        float filterDensity =
3173            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
3174        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
3175                sizeof(filterDensity), &filterDensity);
3176    }
3177
3178    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3179        float focalLength =
3180            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3181        rc = AddSetParmEntryToBatch(mParameters,
3182                CAM_INTF_META_LENS_FOCAL_LENGTH,
3183                sizeof(focalLength), &focalLength);
3184    }
3185
3186    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
3187        uint8_t optStabMode =
3188            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
3189        rc = AddSetParmEntryToBatch(mParameters,
3190                CAM_INTF_META_LENS_OPT_STAB_MODE,
3191                sizeof(optStabMode), &optStabMode);
3192    }
3193
3194    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
3195        uint8_t noiseRedMode =
3196            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
3197        rc = AddSetParmEntryToBatch(mParameters,
3198                CAM_INTF_META_NOISE_REDUCTION_MODE,
3199                sizeof(noiseRedMode), &noiseRedMode);
3200    }
3201
3202    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
3203        uint8_t noiseRedStrength =
3204            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
3205        rc = AddSetParmEntryToBatch(mParameters,
3206                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
3207                sizeof(noiseRedStrength), &noiseRedStrength);
3208    }
3209
3210    cam_crop_region_t scalerCropRegion;
3211    bool scalerCropSet = false;
3212    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
3213        scalerCropRegion.left =
3214            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
3215        scalerCropRegion.top =
3216            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
3217        scalerCropRegion.width =
3218            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
3219        scalerCropRegion.height =
3220            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
3221        rc = AddSetParmEntryToBatch(mParameters,
3222                CAM_INTF_META_SCALER_CROP_REGION,
3223                sizeof(scalerCropRegion), &scalerCropRegion);
3224        scalerCropSet = true;
3225    }
3226
3227    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
3228        int64_t sensorExpTime =
3229            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
3230        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
3231        rc = AddSetParmEntryToBatch(mParameters,
3232                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
3233                sizeof(sensorExpTime), &sensorExpTime);
3234    }
3235
3236    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
3237        int64_t sensorFrameDuration =
3238            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
3239        int64_t minFrameDuration = getMinFrameDuration(request);
3240        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
3241        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
3242            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
3243        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
3244        rc = AddSetParmEntryToBatch(mParameters,
3245                CAM_INTF_META_SENSOR_FRAME_DURATION,
3246                sizeof(sensorFrameDuration), &sensorFrameDuration);
3247    }
3248
3249    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3250        int32_t sensorSensitivity =
3251            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3252        if (sensorSensitivity <
3253                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
3254            sensorSensitivity =
3255                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
3256        if (sensorSensitivity >
3257                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
3258            sensorSensitivity =
3259                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
3260        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
3261        rc = AddSetParmEntryToBatch(mParameters,
3262                CAM_INTF_META_SENSOR_SENSITIVITY,
3263                sizeof(sensorSensitivity), &sensorSensitivity);
3264    }
3265
3266    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3267        int32_t shadingMode =
3268            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3269        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3270                sizeof(shadingMode), &shadingMode);
3271    }
3272
3273    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3274        uint8_t shadingStrength =
3275            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3276        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3277                sizeof(shadingStrength), &shadingStrength);
3278    }
3279
3280    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3281        uint8_t fwk_facedetectMode =
3282            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3283        uint8_t facedetectMode =
3284            lookupHalName(FACEDETECT_MODES_MAP,
3285                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
3286        rc = AddSetParmEntryToBatch(mParameters,
3287                CAM_INTF_META_STATS_FACEDETECT_MODE,
3288                sizeof(facedetectMode), &facedetectMode);
3289    }
3290
3291    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3292        uint8_t histogramMode =
3293            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3294        rc = AddSetParmEntryToBatch(mParameters,
3295                CAM_INTF_META_STATS_HISTOGRAM_MODE,
3296                sizeof(histogramMode), &histogramMode);
3297    }
3298
3299    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3300        uint8_t sharpnessMapMode =
3301            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3302        rc = AddSetParmEntryToBatch(mParameters,
3303                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3304                sizeof(sharpnessMapMode), &sharpnessMapMode);
3305    }
3306
3307    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3308        uint8_t tonemapMode =
3309            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3310        rc = AddSetParmEntryToBatch(mParameters,
3311                CAM_INTF_META_TONEMAP_MODE,
3312                sizeof(tonemapMode), &tonemapMode);
3313    }
3314    int point = 0;
3315    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) {
3316        cam_tonemap_curve_t tonemapCurveBlue;
3317        tonemapCurveBlue.tonemap_points_cnt =
3318           gCamCapability[mCameraId]->max_tone_map_curve_points;
3319        for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) {
3320            for (int j = 0; j < 2; j++) {
3321               tonemapCurveBlue.tonemap_points[i][j] =
3322                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3323               point++;
3324            }
3325        }
3326        rc = AddSetParmEntryToBatch(mParameters,
3327                CAM_INTF_META_TONEMAP_CURVE_BLUE,
3328                sizeof(tonemapCurveBlue), &tonemapCurveBlue);
3329    }
3330    point = 0;
3331    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) {
3332        cam_tonemap_curve_t tonemapCurveGreen;
3333        tonemapCurveGreen.tonemap_points_cnt =
3334           gCamCapability[mCameraId]->max_tone_map_curve_points;
3335        for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) {
3336            for (int j = 0; j < 2; j++) {
3337               tonemapCurveGreen.tonemap_points[i][j] =
3338                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3339               point++;
3340            }
3341        }
3342        rc = AddSetParmEntryToBatch(mParameters,
3343                CAM_INTF_META_TONEMAP_CURVE_GREEN,
3344                sizeof(tonemapCurveGreen), &tonemapCurveGreen);
3345    }
3346    point = 0;
3347    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3348        cam_tonemap_curve_t tonemapCurveRed;
3349        tonemapCurveRed.tonemap_points_cnt =
3350           gCamCapability[mCameraId]->max_tone_map_curve_points;
3351        for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) {
3352            for (int j = 0; j < 2; j++) {
3353               tonemapCurveRed.tonemap_points[i][j] =
3354                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3355               point++;
3356            }
3357        }
3358        rc = AddSetParmEntryToBatch(mParameters,
3359                CAM_INTF_META_TONEMAP_CURVE_RED,
3360                sizeof(tonemapCurveRed), &tonemapCurveRed);
3361    }
3362
3363    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3364        uint8_t captureIntent =
3365            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3366        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3367                sizeof(captureIntent), &captureIntent);
3368    }
3369
3370    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3371        uint8_t blackLevelLock =
3372            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3373        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3374                sizeof(blackLevelLock), &blackLevelLock);
3375    }
3376
3377    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3378        uint8_t lensShadingMapMode =
3379            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3380        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3381                sizeof(lensShadingMapMode), &lensShadingMapMode);
3382    }
3383
3384    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3385        cam_area_t roi;
3386        bool reset = true;
3387        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
3388        if (scalerCropSet) {
3389            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3390        }
3391        if (reset) {
3392            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3393                    sizeof(roi), &roi);
3394        }
3395    }
3396
3397    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3398        cam_area_t roi;
3399        bool reset = true;
3400        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
3401        if (scalerCropSet) {
3402            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3403        }
3404        if (reset) {
3405            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3406                    sizeof(roi), &roi);
3407        }
3408    }
3409
3410    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3411        cam_area_t roi;
3412        bool reset = true;
3413        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
3414        if (scalerCropSet) {
3415            reset = resetIfNeededROI(&roi, &scalerCropRegion);
3416        }
3417        if (reset) {
3418            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3419                    sizeof(roi), &roi);
3420        }
3421    }
3422    return rc;
3423}
3424
3425/*===========================================================================
3426 * FUNCTION   : getJpegSettings
3427 *
3428 * DESCRIPTION: save the jpeg settings in the HAL
3429 *
3430 *
3431 * PARAMETERS :
3432 *   @settings  : frame settings information from framework
3433 *
3434 *
3435 * RETURN     : success: NO_ERROR
3436 *              failure:
3437 *==========================================================================*/
3438int QCamera3HardwareInterface::getJpegSettings
3439                                  (const camera_metadata_t *settings)
3440{
3441    if (mJpegSettings) {
3442        if (mJpegSettings->gps_timestamp) {
3443            free(mJpegSettings->gps_timestamp);
3444            mJpegSettings->gps_timestamp = NULL;
3445        }
3446        if (mJpegSettings->gps_coordinates) {
3447            for (int i = 0; i < 3; i++) {
3448                free(mJpegSettings->gps_coordinates[i]);
3449                mJpegSettings->gps_coordinates[i] = NULL;
3450            }
3451        }
3452        free(mJpegSettings);
3453        mJpegSettings = NULL;
3454    }
3455    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3456    CameraMetadata jpeg_settings;
3457    jpeg_settings = settings;
3458
3459    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3460        mJpegSettings->jpeg_orientation =
3461            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3462    } else {
3463        mJpegSettings->jpeg_orientation = 0;
3464    }
3465    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3466        mJpegSettings->jpeg_quality =
3467            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3468    } else {
3469        mJpegSettings->jpeg_quality = 85;
3470    }
3471    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3472        mJpegSettings->thumbnail_size.width =
3473            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3474        mJpegSettings->thumbnail_size.height =
3475            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3476    } else {
3477        mJpegSettings->thumbnail_size.width = 0;
3478        mJpegSettings->thumbnail_size.height = 0;
3479    }
3480    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3481        for (int i = 0; i < 3; i++) {
3482            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3483            *(mJpegSettings->gps_coordinates[i]) =
3484                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3485        }
3486    } else{
3487       for (int i = 0; i < 3; i++) {
3488            mJpegSettings->gps_coordinates[i] = NULL;
3489        }
3490    }
3491
3492    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3493        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3494        *(mJpegSettings->gps_timestamp) =
3495            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3496    } else {
3497        mJpegSettings->gps_timestamp = NULL;
3498    }
3499
3500    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3501        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3502        for (int i = 0; i < len; i++) {
3503            mJpegSettings->gps_processing_method[i] =
3504                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3505        }
3506        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3507            mJpegSettings->gps_processing_method[len] = '\0';
3508        }
3509    } else {
3510        mJpegSettings->gps_processing_method[0] = '\0';
3511    }
3512
3513    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
3514        mJpegSettings->sensor_sensitivity =
3515            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
3516    } else {
3517        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3518    }
3519
3520    mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3521
3522    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3523        mJpegSettings->lens_focal_length =
3524            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3525    }
3526    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3527        mJpegSettings->exposure_compensation =
3528            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3529    }
3530    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3531    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3532    mJpegSettings->is_jpeg_format = true;
3533    mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3534    return 0;
3535}
3536
3537/*===========================================================================
3538 * FUNCTION   : captureResultCb
3539 *
3540 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3541 *
3542 * PARAMETERS :
3543 *   @frame  : frame information from mm-camera-interface
3544 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3545 *   @userdata: userdata
3546 *
3547 * RETURN     : NONE
3548 *==========================================================================*/
3549void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3550                camera3_stream_buffer_t *buffer,
3551                uint32_t frame_number, void *userdata)
3552{
3553    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3554    if (hw == NULL) {
3555        ALOGE("%s: Invalid hw %p", __func__, hw);
3556        return;
3557    }
3558
3559    hw->captureResultCb(metadata, buffer, frame_number);
3560    return;
3561}
3562
3563
3564/*===========================================================================
3565 * FUNCTION   : initialize
3566 *
3567 * DESCRIPTION: Pass framework callback pointers to HAL
3568 *
3569 * PARAMETERS :
3570 *
3571 *
3572 * RETURN     : Success : 0
3573 *              Failure: -ENODEV
3574 *==========================================================================*/
3575
3576int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3577                                  const camera3_callback_ops_t *callback_ops)
3578{
3579    ALOGV("%s: E", __func__);
3580    QCamera3HardwareInterface *hw =
3581        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3582    if (!hw) {
3583        ALOGE("%s: NULL camera device", __func__);
3584        return -ENODEV;
3585    }
3586
3587    int rc = hw->initialize(callback_ops);
3588    ALOGV("%s: X", __func__);
3589    return rc;
3590}
3591
3592/*===========================================================================
3593 * FUNCTION   : configure_streams
3594 *
 * DESCRIPTION: Static entry point for the framework to set up a new stream
 *              configuration; forwards to the instance's configureStreams().
3596 *
3597 * PARAMETERS :
3598 *
3599 *
3600 * RETURN     : Success: 0
3601 *              Failure: -EINVAL (if stream configuration is invalid)
3602 *                       -ENODEV (fatal error)
3603 *==========================================================================*/
3604
3605int QCamera3HardwareInterface::configure_streams(
3606        const struct camera3_device *device,
3607        camera3_stream_configuration_t *stream_list)
3608{
3609    ALOGV("%s: E", __func__);
3610    QCamera3HardwareInterface *hw =
3611        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3612    if (!hw) {
3613        ALOGE("%s: NULL camera device", __func__);
3614        return -ENODEV;
3615    }
3616    int rc = hw->configureStreams(stream_list);
3617    ALOGV("%s: X", __func__);
3618    return rc;
3619}
3620
3621/*===========================================================================
3622 * FUNCTION   : register_stream_buffers
3623 *
3624 * DESCRIPTION: Register stream buffers with the device
3625 *
3626 * PARAMETERS :
3627 *
3628 * RETURN     :
3629 *==========================================================================*/
3630int QCamera3HardwareInterface::register_stream_buffers(
3631        const struct camera3_device *device,
3632        const camera3_stream_buffer_set_t *buffer_set)
3633{
3634    ALOGV("%s: E", __func__);
3635    QCamera3HardwareInterface *hw =
3636        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3637    if (!hw) {
3638        ALOGE("%s: NULL camera device", __func__);
3639        return -ENODEV;
3640    }
3641    int rc = hw->registerStreamBuffers(buffer_set);
3642    ALOGV("%s: X", __func__);
3643    return rc;
3644}
3645
3646/*===========================================================================
3647 * FUNCTION   : construct_default_request_settings
3648 *
3649 * DESCRIPTION: Configure a settings buffer to meet the required use case
3650 *
3651 * PARAMETERS :
3652 *
3653 *
3654 * RETURN     : Success: Return valid metadata
3655 *              Failure: Return NULL
3656 *==========================================================================*/
3657const camera_metadata_t* QCamera3HardwareInterface::
3658    construct_default_request_settings(const struct camera3_device *device,
3659                                        int type)
3660{
3661
3662    ALOGV("%s: E", __func__);
3663    camera_metadata_t* fwk_metadata = NULL;
3664    QCamera3HardwareInterface *hw =
3665        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3666    if (!hw) {
3667        ALOGE("%s: NULL camera device", __func__);
3668        return NULL;
3669    }
3670
3671    fwk_metadata = hw->translateCapabilityToMetadata(type);
3672
3673    ALOGV("%s: X", __func__);
3674    return fwk_metadata;
3675}
3676
3677/*===========================================================================
3678 * FUNCTION   : process_capture_request
3679 *
 * DESCRIPTION: Static entry point for submitting a capture request; forwards
 *              to the instance's processCaptureRequest().
3681 *
3682 * PARAMETERS :
3683 *
3684 *
3685 * RETURN     :
3686 *==========================================================================*/
3687int QCamera3HardwareInterface::process_capture_request(
3688                    const struct camera3_device *device,
3689                    camera3_capture_request_t *request)
3690{
3691    ALOGV("%s: E", __func__);
3692    QCamera3HardwareInterface *hw =
3693        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3694    if (!hw) {
3695        ALOGE("%s: NULL camera device", __func__);
3696        return -EINVAL;
3697    }
3698
3699    int rc = hw->processCaptureRequest(request);
3700    ALOGV("%s: X", __func__);
3701    return rc;
3702}
3703
3704/*===========================================================================
3705 * FUNCTION   : get_metadata_vendor_tag_ops
3706 *
 * DESCRIPTION: Static entry point that queries the vendor tag query ops
 *              from the HWI instance stored in device->priv.
 *
 * PARAMETERS :
 *   @device : camera3 device handle
 *   @ops    : output; filled with the vendor tag query ops
 *
 * RETURN     : none
3713 *==========================================================================*/
3714
3715void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3716                const struct camera3_device *device,
3717                vendor_tag_query_ops_t* ops)
3718{
3719    ALOGV("%s: E", __func__);
3720    QCamera3HardwareInterface *hw =
3721        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3722    if (!hw) {
3723        ALOGE("%s: NULL camera device", __func__);
3724        return;
3725    }
3726
3727    hw->getMetadataVendorTagOps(ops);
3728    ALOGV("%s: X", __func__);
3729    return;
3730}
3731
3732/*===========================================================================
3733 * FUNCTION   : dump
3734 *
 * DESCRIPTION: Static entry point that dumps HWI state for debugging to the
 *              given file descriptor.
 *
 * PARAMETERS :
 *   @device : camera3 device handle
 *   @fd     : file descriptor to write the dump into
 *
 * RETURN     : none
3741 *==========================================================================*/
3742
3743void QCamera3HardwareInterface::dump(
3744                const struct camera3_device *device, int fd)
3745{
3746    ALOGV("%s: E", __func__);
3747    QCamera3HardwareInterface *hw =
3748        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3749    if (!hw) {
3750        ALOGE("%s: NULL camera device", __func__);
3751        return;
3752    }
3753
3754    hw->dump(fd);
3755    ALOGV("%s: X", __func__);
3756    return;
3757}
3758
3759/*===========================================================================
3760 * FUNCTION   : flush
3761 *
 * DESCRIPTION: Static entry point that asks the HWI instance to flush all
 *              in-flight work.
 *
 * PARAMETERS :
 *   @device : camera3 device handle
 *
 * RETURN     : 0 on success, -EINVAL on NULL device, else error from flush()
3768 *==========================================================================*/
3769
3770int QCamera3HardwareInterface::flush(
3771                const struct camera3_device *device)
3772{
3773    int rc;
3774    ALOGV("%s: E", __func__);
3775    QCamera3HardwareInterface *hw =
3776        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3777    if (!hw) {
3778        ALOGE("%s: NULL camera device", __func__);
3779        return -EINVAL;
3780    }
3781
3782    rc = hw->flush();
3783    ALOGV("%s: X", __func__);
3784    return rc;
3785}
3786
3787/*===========================================================================
3788 * FUNCTION   : close_camera_device
3789 *
 * DESCRIPTION: Static entry point that destroys the HWI instance stored in
 *              the device's priv field and releases the global session slot.
 *
 * PARAMETERS :
 *   @device : hw_device_t handle (actually a camera3_device_t)
 *
 * RETURN     : NO_ERROR on success, BAD_VALUE on NULL device
3796 *==========================================================================*/
3797int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3798{
3799    ALOGV("%s: E", __func__);
3800    int ret = NO_ERROR;
3801    QCamera3HardwareInterface *hw =
3802        reinterpret_cast<QCamera3HardwareInterface *>(
3803            reinterpret_cast<camera3_device_t *>(device)->priv);
3804    if (!hw) {
3805        ALOGE("NULL camera device");
3806        return BAD_VALUE;
3807    }
3808    delete hw;
3809
3810    pthread_mutex_lock(&mCameraSessionLock);
3811    mCameraSessionActive = 0;
3812    pthread_mutex_unlock(&mCameraSessionLock);
3813    ALOGV("%s: X", __func__);
3814    return ret;
3815}
3816
3817/*===========================================================================
3818 * FUNCTION   : getWaveletDenoiseProcessPlate
3819 *
3820 * DESCRIPTION: query wavelet denoise process plate
3821 *
3822 * PARAMETERS : None
3823 *
 * RETURN     : WNR process plate value
3825 *==========================================================================*/
3826cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3827{
3828    char prop[PROPERTY_VALUE_MAX];
3829    memset(prop, 0, sizeof(prop));
3830    property_get("persist.denoise.process.plates", prop, "0");
3831    int processPlate = atoi(prop);
3832    switch(processPlate) {
3833    case 0:
3834        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3835    case 1:
3836        return CAM_WAVELET_DENOISE_CBCR_ONLY;
3837    case 2:
3838        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3839    case 3:
3840        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3841    default:
3842        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3843    }
3844}
3845
3846/*===========================================================================
3847 * FUNCTION   : needRotationReprocess
3848 *
3849 * DESCRIPTION: if rotation needs to be done by reprocess in pp
3850 *
3851 * PARAMETERS : none
3852 *
3853 * RETURN     : true: needed
3854 *              false: no need
3855 *==========================================================================*/
3856bool QCamera3HardwareInterface::needRotationReprocess()
3857{
3858
3859    if (!mJpegSettings->is_jpeg_format) {
3860        // RAW image, no need to reprocess
3861        return false;
3862    }
3863
3864    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3865        mJpegSettings->jpeg_orientation > 0) {
3866        // current rotation is not zero, and pp has the capability to process rotation
3867        ALOGD("%s: need do reprocess for rotation", __func__);
3868        return true;
3869    }
3870
3871    return false;
3872}
3873
3874/*===========================================================================
3875 * FUNCTION   : needReprocess
3876 *
 * DESCRIPTION: if reprocess is needed
3878 *
3879 * PARAMETERS : none
3880 *
3881 * RETURN     : true: needed
3882 *              false: no need
3883 *==========================================================================*/
3884bool QCamera3HardwareInterface::needReprocess()
3885{
3886    if (!mJpegSettings->is_jpeg_format) {
3887        // RAW image, no need to reprocess
3888        return false;
3889    }
3890
3891    if ((mJpegSettings->min_required_pp_mask > 0) ||
3892         isWNREnabled()) {
3893        // TODO: add for ZSL HDR later
3894        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3895        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3896        return true;
3897    }
3898    return needRotationReprocess();
3899}
3900
3901/*===========================================================================
3902 * FUNCTION   : addOnlineReprocChannel
3903 *
3904 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
3905 *              coming from input channel
3906 *
3907 * PARAMETERS :
3908 *   @pInputChannel : ptr to input channel whose frames will be post-processed
3909 *
3910 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3911 *==========================================================================*/
3912QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3913              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3914{
3915    int32_t rc = NO_ERROR;
3916    QCamera3ReprocessChannel *pChannel = NULL;
3917    if (pInputChannel == NULL) {
3918        ALOGE("%s: input channel obj is NULL", __func__);
3919        return NULL;
3920    }
3921
3922    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3923            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
3924    if (NULL == pChannel) {
3925        ALOGE("%s: no mem for reprocess channel", __func__);
3926        return NULL;
3927    }
3928
3929    // Capture channel, only need snapshot and postview streams start together
3930    mm_camera_channel_attr_t attr;
3931    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
3932    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
3933    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
3934    rc = pChannel->initialize();
3935    if (rc != NO_ERROR) {
3936        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
3937        delete pChannel;
3938        return NULL;
3939    }
3940
3941    // pp feature config
3942    cam_pp_feature_config_t pp_config;
3943    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
3944    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
3945        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
3946        pp_config.sharpness = 10;
3947    }
3948
3949    if (isWNREnabled()) {
3950        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
3951        pp_config.denoise2d.denoise_enable = 1;
3952        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
3953    }
3954    if (needRotationReprocess()) {
3955        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
3956        int rotation = mJpegSettings->jpeg_orientation;
3957        if (rotation == 0) {
3958            pp_config.rotation = ROTATE_0;
3959        } else if (rotation == 90) {
3960            pp_config.rotation = ROTATE_90;
3961        } else if (rotation == 180) {
3962            pp_config.rotation = ROTATE_180;
3963        } else if (rotation == 270) {
3964            pp_config.rotation = ROTATE_270;
3965        }
3966    }
3967
3968   rc = pChannel->addReprocStreamsFromSource(pp_config,
3969                                             pInputChannel,
3970                                             mMetadataChannel);
3971
3972    if (rc != NO_ERROR) {
3973        delete pChannel;
3974        return NULL;
3975    }
3976    return pChannel;
3977}
3978
/*===========================================================================
 * FUNCTION   : getMaxUnmatchedFramesInQueue
 *
 * DESCRIPTION: query the capability for the minimum number of post-proc
 *              buffers, used as the unmatched-frame limit for superbuf queues
 *
 * PARAMETERS : none
 *
 * RETURN     : min_num_pp_bufs from the current camera's capability
 *==========================================================================*/
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
3983
/*===========================================================================
 * FUNCTION   : isWNREnabled
 *
 * DESCRIPTION: query whether wavelet noise reduction is supported, as
 *              reported by the current camera's capability
 *
 * PARAMETERS : none
 *
 * RETURN     : true if WNR is supported, false otherwise
 *==========================================================================*/
bool QCamera3HardwareInterface::isWNREnabled() {
    return gCamCapability[mCameraId]->isWnrSupported;
}
3987
3988}; //end namespace qcamera
3989