QCamera3HWI.cpp revision 74f6761b0f475069b59e715fcf8753b1664e0bdb
1/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
49cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
50parm_buffer_t *prevSettings;
51const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
52
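/* Translation tables mapping framework (ANDROID_*) enum values to the
 * corresponding backend (CAM_*) enum values used by the mm-camera interface. */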
53const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
54    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
55    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
56    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
57    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
58    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
59    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
60    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
61    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
62    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
63};
64
65const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
66    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
67    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
68    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
69    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
70    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
71    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
72    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
73    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
74    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
75};
76
77const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
78    { ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED,    CAM_SCENE_MODE_OFF },
79    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
80    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
81    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
82    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
83    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
84    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
85    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
86    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
87    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
88    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
89    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
90    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
91    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
92    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
93    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
94};
95
96const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
97    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
98    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
99    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
100    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
101    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
102    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
103};
104
105const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
106    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
107    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
108    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
109    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
110};
111
112const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
113    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
114    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
115    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
116    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
117    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
118};
119
120const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
121    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
122    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON   },
123    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
124};
125
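/* Available JPEG thumbnail sizes, flattened as (width, height) pairs; the
 * trailing (0, 0) entry indicates that thumbnail generation may be disabled. */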
126const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
127                                             320, 240, 176, 144, 0, 0};
128
129camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
130    initialize:                         QCamera3HardwareInterface::initialize,
131    configure_streams:                  QCamera3HardwareInterface::configure_streams,
132    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
133    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
134    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
135    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
136    dump:                               QCamera3HardwareInterface::dump,
137};
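/* Static entry points installed on camera3_device_t::ops. Each one is expected
 * to recover the QCamera3HardwareInterface instance from the device's priv
 * field (set in the constructor) and forward to the member function of the
 * same name. */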
138
139
140/*===========================================================================
141 * FUNCTION   : QCamera3HardwareInterface
142 *
143 * DESCRIPTION: constructor of QCamera3HardwareInterface
144 *
145 * PARAMETERS :
146 *   @cameraId  : camera ID
147 *
148 * RETURN     : none
149 *==========================================================================*/
150QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
151    : mCameraId(cameraId),
152      mCameraHandle(NULL),
153      mCameraOpened(false),
154      mCallbackOps(NULL),
155      mInputStream(NULL),
156      mMetadataChannel(NULL),
157      mFirstRequest(false),
158      mParamHeap(NULL),
159      mParameters(NULL),
160      mJpegSettings(NULL)
161{
162    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
163    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
164    mCameraDevice.common.close = close_camera_device;
165    mCameraDevice.ops = &mCameraOps;
166    mCameraDevice.priv = this;
167    gCamCapability[cameraId]->version = CAM_HAL_V3;
168
169    pthread_mutex_init(&mRequestLock, NULL);
170    pthread_cond_init(&mRequestCond, NULL);
171    mPendingRequest = 0;
172    mCurrentRequestId = -1;
173
174    pthread_mutex_init(&mMutex, NULL);
175    pthread_mutex_init(&mCaptureResultLock, NULL);
176
177    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
178        mDefaultMetadata[i] = NULL;
179}
180
181/*===========================================================================
182 * FUNCTION   : ~QCamera3HardwareInterface
183 *
184 * DESCRIPTION: destructor of QCamera3HardwareInterface
185 *
186 * PARAMETERS : none
187 *
188 * RETURN     : none
189 *==========================================================================*/
190QCamera3HardwareInterface::~QCamera3HardwareInterface()
191{
192    ALOGV("%s: E", __func__);
193    /* Clean up all channels */
    if (mMetadataChannel) {
194        mMetadataChannel->stop();
195        delete mMetadataChannel;
196        mMetadataChannel = NULL;
    }
197    /* We need to stop all streams before deleting any stream */
198    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
199        it != mStreamInfo.end(); it++) {
200        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
201        channel->stop();
202    }
203    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
204        it != mStreamInfo.end(); it++) {
205        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
206        delete channel;
207        free (*it);
208    }
209
210    if (mJpegSettings != NULL) {
211        free(mJpegSettings);
212        mJpegSettings = NULL;
213    }
214    deinitParameters();
215    closeCamera();
216
217    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
218        if (mDefaultMetadata[i])
219            free_camera_metadata(mDefaultMetadata[i]);
220
221    pthread_mutex_destroy(&mRequestLock);
222    pthread_cond_destroy(&mRequestCond);
223
224    pthread_mutex_destroy(&mMutex);
225    pthread_mutex_destroy(&mCaptureResultLock);
226    ALOGV("%s: X", __func__);
227}
228
229/*===========================================================================
230 * FUNCTION   : openCamera
231 *
232 * DESCRIPTION: open camera
233 *
234 * PARAMETERS :
235 *   @hw_device  : double ptr for camera device struct
236 *
237 * RETURN     : int32_t type of status
238 *              NO_ERROR  -- success
239 *              none-zero failure code
240 *==========================================================================*/
241int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
242{
243    //int rc = NO_ERROR;
244    int rc = 0;
245    if (mCameraOpened) {
246        *hw_device = NULL;
247        return PERMISSION_DENIED;
248    }
249
250    rc = openCamera();
251    if (rc == 0)
252        *hw_device = &mCameraDevice.common;
253    else
254        *hw_device = NULL;
255    return rc;
256}
257
258/*===========================================================================
259 * FUNCTION   : openCamera
260 *
261 * DESCRIPTION: open camera
262 *
263 * PARAMETERS : none
264 *
265 * RETURN     : int32_t type of status
266 *              NO_ERROR  -- success
267 *              none-zero failure code
268 *==========================================================================*/
269int QCamera3HardwareInterface::openCamera()
270{
271    if (mCameraHandle) {
272        ALOGE("Failure: Camera already opened");
273        return ALREADY_EXISTS;
274    }
275    mCameraHandle = camera_open(mCameraId);
276    if (!mCameraHandle) {
277        ALOGE("camera_open failed.");
278        return UNKNOWN_ERROR;
279    }
280
281    mCameraOpened = true;
282
283    return NO_ERROR;
284}
285
286/*===========================================================================
287 * FUNCTION   : closeCamera
288 *
289 * DESCRIPTION: close camera
290 *
291 * PARAMETERS : none
292 *
293 * RETURN     : int32_t type of status
294 *              NO_ERROR  -- success
295 *              none-zero failure code
296 *==========================================================================*/
297int QCamera3HardwareInterface::closeCamera()
298{
299    int rc = NO_ERROR;
300
301    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
302    mCameraHandle = NULL;
303    mCameraOpened = false;
304
305    return rc;
306}
307
308/*===========================================================================
309 * FUNCTION   : initialize
310 *
311 * DESCRIPTION: Initialize frameworks callback functions
312 *
313 * PARAMETERS :
314 *   @callback_ops : callback function to frameworks
315 *
316 * RETURN     :
317 *
318 *==========================================================================*/
319int QCamera3HardwareInterface::initialize(
320        const struct camera3_callback_ops *callback_ops)
321{
322    int rc;
323
324    pthread_mutex_lock(&mMutex);
325
326    rc = initParameters();
327    if (rc < 0) {
328        ALOGE("%s: initParameters failed %d", __func__, rc);
329        goto err1;
330    }
331    //Create metadata channel and initialize it
332    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
333                    mCameraHandle->ops, captureResultCb,
334                    &gCamCapability[mCameraId]->padding_info, this);
335    if (mMetadataChannel == NULL) {
336        ALOGE("%s: failed to allocate metadata channel", __func__);
337        rc = -ENOMEM;
338        goto err2;
339    }
340    rc = mMetadataChannel->initialize();
341    if (rc < 0) {
342        ALOGE("%s: metadata channel initialization failed", __func__);
343        goto err3;
344    }
345
346    mCallbackOps = callback_ops;
347
348    pthread_mutex_unlock(&mMutex);
349    return 0;
350
351err3:
352    delete mMetadataChannel;
353    mMetadataChannel = NULL;
354err2:
355    deinitParameters();
356err1:
357    pthread_mutex_unlock(&mMutex);
358    return rc;
359}
360
361/*===========================================================================
362 * FUNCTION   : configureStreams
363 *
364 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
365 *              and output streams.
366 *
367 * PARAMETERS :
368 *   @stream_list : streams to be configured
369 *
370 * RETURN     :
371 *
372 *==========================================================================*/
373int QCamera3HardwareInterface::configureStreams(
374        camera3_stream_configuration_t *streamList)
375{
376    int rc = 0;
377    pthread_mutex_lock(&mMutex);
378
379    // Sanity check stream_list
380    if (streamList == NULL) {
381        ALOGE("%s: NULL stream configuration", __func__);
382        pthread_mutex_unlock(&mMutex);
383        return BAD_VALUE;
384    }
385
386    if (streamList->streams == NULL) {
387        ALOGE("%s: NULL stream list", __func__);
388        pthread_mutex_unlock(&mMutex);
389        return BAD_VALUE;
390    }
391
392    if (streamList->num_streams < 1) {
393        ALOGE("%s: Bad number of streams requested: %d", __func__,
394                streamList->num_streams);
395        pthread_mutex_unlock(&mMutex);
396        return BAD_VALUE;
397    }
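    /* Stream (re)configuration flow:
     *  1. Stop every previously known stream's channel and mark it INVALID.
     *  2. Walk the new stream list: streams seen before are marked RECONFIGURE
     *     (their channels are deleted and rebuilt), new ones are added as VALID.
     *  3. Entries still INVALID were dropped by the framework and are freed,
     *     along with their channels and registered buffer arrays. */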
398
399    camera3_stream_t *inputStream = NULL;
400    /* first invalidate all the streams in the mStreamInfo list;
401     * if they appear again, they will be validated */
402    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
403            it != mStreamInfo.end(); it++) {
404        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
405        channel->stop();
406        (*it)->status = INVALID;
407    }
408
409    for (size_t i = 0; i < streamList->num_streams; i++) {
410        camera3_stream_t *newStream = streamList->streams[i];
411        ALOGV("%s: newStream type = %d, stream format = %d",
412                __func__, newStream->stream_type, newStream->format);
413        //if the stream is in the mStreamInfo list, validate it
414        bool stream_exists = false;
415        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
416                it != mStreamInfo.end(); it++) {
417            if ((*it)->stream == newStream) {
418                QCamera3Channel *channel =
419                    (QCamera3Channel*)(*it)->stream->priv;
420                stream_exists = true;
421                (*it)->status = RECONFIGURE;
422                /*delete the channel object associated with the stream because
423                  we need to reconfigure*/
424                delete channel;
425                (*it)->stream->priv = NULL;
426            }
427        }
428        if (!stream_exists) {
429            //new stream
430            stream_info_t* stream_info;
431            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
432            stream_info->stream = newStream;
433            stream_info->status = VALID;
434            stream_info->registered = 0;
435            mStreamInfo.push_back(stream_info);
436        }
437        if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
438            if (inputStream != NULL) {
439                ALOGE("%s: Multiple input streams requested!", __func__);
440                pthread_mutex_unlock(&mMutex);
441                return BAD_VALUE;
442            }
443            inputStream = newStream;
444        }
445    }
446    mInputStream = inputStream;
447
448    /*clean up invalid streams*/
449    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
450            it != mStreamInfo.end();) {
451        if(((*it)->status) == INVALID){
452            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
453            delete channel;
454            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
455            free(*it);
456            it = mStreamInfo.erase(it);
457        } else {
458            it++;
459        }
460    }
461
462    //mMetadataChannel->stop();
463
464    /* Allocate channel objects for the requested streams */
465    for (size_t i = 0; i < streamList->num_streams; i++) {
466        camera3_stream_t *newStream = streamList->streams[i];
467        if (newStream->priv == NULL) {
468            //New stream, construct channel
469            switch (newStream->stream_type) {
470            case CAMERA3_STREAM_INPUT:
471                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
472                break;
473            case CAMERA3_STREAM_BIDIRECTIONAL:
474                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
475                    GRALLOC_USAGE_HW_CAMERA_WRITE;
476                break;
477            case CAMERA3_STREAM_OUTPUT:
478                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
479                break;
480            default:
481                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
482                break;
483            }
484
485            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
486                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
487                QCamera3Channel *channel;
488                switch (newStream->format) {
489                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
490                case HAL_PIXEL_FORMAT_YCbCr_420_888:
491                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
492                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
493                            mCameraHandle->ops, captureResultCb,
494                            &gCamCapability[mCameraId]->padding_info, this, newStream);
495                    if (channel == NULL) {
496                        ALOGE("%s: allocation of channel failed", __func__);
497                        pthread_mutex_unlock(&mMutex);
498                        return -ENOMEM;
499                    }
500
501                    newStream->priv = channel;
502                    break;
503                case HAL_PIXEL_FORMAT_BLOB:
504                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
505                    channel = new QCamera3PicChannel(mCameraHandle->camera_handle,
506                            mCameraHandle->ops, captureResultCb,
507                            &gCamCapability[mCameraId]->padding_info, this, newStream);
508                    if (channel == NULL) {
509                        ALOGE("%s: allocation of channel failed", __func__);
510                        pthread_mutex_unlock(&mMutex);
511                        return -ENOMEM;
512                    }
513                    newStream->priv = channel;
514                    break;
515
516                //TODO: Add support for app consumed format?
517                default:
518                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
519                    break;
520                }
521            }
522        } else {
523            // Channel already exists for this stream
524            // Do nothing for now
525        }
526    }
527    /*For the streams to be reconfigured we need to register the buffers
528      since the framework won't*/
529    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
530            it != mStreamInfo.end(); it++) {
531        if ((*it)->status == RECONFIGURE) {
532            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
533            /*only register buffers for streams that have already been
534              registered*/
535            if ((*it)->registered) {
536                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
537                        (*it)->buffer_set.buffers);
538                if (rc != NO_ERROR) {
539                    ALOGE("%s: Failed to register the buffers of old stream,"
540                            " rc = %d", __func__, rc);
541                }
542                ALOGD("%s: channel %p has %d buffers",
543                        __func__, channel, (*it)->buffer_set.num_buffers);
544            }
545        }
546
547        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
548        if (index == NAME_NOT_FOUND) {
549            mPendingBuffersMap.add((*it)->stream, 0);
550        } else {
551            mPendingBuffersMap.editValueAt(index) = 0;
552        }
553    }
554
555    /* Initialize mPendingRequestsList and mPendingBuffersMap */
556    mPendingRequestsList.clear();
557
558    //settings/parameters don't carry over for new configureStreams
559    memset(mParameters, 0, sizeof(parm_buffer_t));
560    mFirstRequest = true;
561
562    pthread_mutex_unlock(&mMutex);
563    return rc;
564}
565
566/*===========================================================================
567 * FUNCTION   : validateCaptureRequest
568 *
569 * DESCRIPTION: validate a capture request from camera service
570 *
571 * PARAMETERS :
572 *   @request : request from framework to process
573 *
574 * RETURN     :
575 *
576 *==========================================================================*/
577int QCamera3HardwareInterface::validateCaptureRequest(
578                    camera3_capture_request_t *request)
579{
580    ssize_t idx = 0;
581    const camera3_stream_buffer_t *b;
582    CameraMetadata meta;
583
584    /* Sanity check the request */
585    if (request == NULL) {
586        ALOGE("%s: NULL capture request", __func__);
587        return BAD_VALUE;
588    }
589
590    uint32_t frameNumber = request->frame_number;
591    if (request->input_buffer != NULL &&
592            request->input_buffer->stream != mInputStream) {
593        ALOGE("%s: Request %d: Input buffer not from input stream!",
594                __FUNCTION__, frameNumber);
595        return BAD_VALUE;
596    }
597    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
598        ALOGE("%s: Request %d: No output buffers provided!",
599                __FUNCTION__, frameNumber);
600        return BAD_VALUE;
601    }
602    if (request->input_buffer != NULL) {
603        //TODO
604        ALOGE("%s: Not supporting input buffer yet", __func__);
605        return BAD_VALUE;
606    }
607
608    // Validate all buffers
609    b = request->output_buffers;
610    do {
611        QCamera3Channel *channel =
612                static_cast<QCamera3Channel*>(b->stream->priv);
613        if (channel == NULL) {
614            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
615                    __func__, frameNumber, idx);
616            return BAD_VALUE;
617        }
618        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
619            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
620                    __func__, frameNumber, idx);
621            return BAD_VALUE;
622        }
623        if (b->release_fence != -1) {
624            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
625                    __func__, frameNumber, idx);
626            return BAD_VALUE;
627        }
628        if (b->buffer == NULL) {
629            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
630                    __func__, frameNumber, idx);
631            return BAD_VALUE;
632        }
633        idx++;
634        b = request->output_buffers + idx;
635    } while (idx < (ssize_t)request->num_output_buffers);
636
637    return NO_ERROR;
638}
639
640/*===========================================================================
641 * FUNCTION   : registerStreamBuffers
642 *
643 * DESCRIPTION: Register buffers for a given stream with the HAL device.
644 *
645 * PARAMETERS :
646 *   @stream_list : streams to be configured
647 *
648 * RETURN     :
649 *
650 *==========================================================================*/
651int QCamera3HardwareInterface::registerStreamBuffers(
652        const camera3_stream_buffer_set_t *buffer_set)
653{
654    int rc = 0;
655
656    pthread_mutex_lock(&mMutex);
657
658    if (buffer_set == NULL) {
659        ALOGE("%s: Invalid buffer_set parameter.", __func__);
660        pthread_mutex_unlock(&mMutex);
661        return -EINVAL;
662    }
663    if (buffer_set->stream == NULL) {
664        ALOGE("%s: Invalid stream parameter.", __func__);
665        pthread_mutex_unlock(&mMutex);
666        return -EINVAL;
667    }
668    if (buffer_set->num_buffers < 1) {
669        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
670        pthread_mutex_unlock(&mMutex);
671        return -EINVAL;
672    }
673    if (buffer_set->buffers == NULL) {
674        ALOGE("%s: Invalid buffers parameter.", __func__);
675        pthread_mutex_unlock(&mMutex);
676        return -EINVAL;
677    }
678
679    camera3_stream_t *stream = buffer_set->stream;
680    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
681
682    //set the buffer_set in the mStreamInfo array
683    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
684            it != mStreamInfo.end(); it++) {
685        if ((*it)->stream == stream) {
686            uint32_t numBuffers = buffer_set->num_buffers;
687            (*it)->buffer_set.stream = buffer_set->stream;
688            (*it)->buffer_set.num_buffers = numBuffers;
689            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
690            if ((*it)->buffer_set.buffers == NULL) {
691                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
692                pthread_mutex_unlock(&mMutex);
693                return -ENOMEM;
694            }
695            for (size_t j = 0; j < numBuffers; j++){
696                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
697            }
698            (*it)->registered = 1;
699        }
700    }
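    /* The copy of buffer_set saved above is reused by configureStreams() to
     * re-register buffers for RECONFIGURE streams, since the framework only
     * registers each stream's buffers once. */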
701
702    if (stream->stream_type != CAMERA3_STREAM_OUTPUT) {
703        ALOGE("%s: not yet support non output type stream", __func__);
704        pthread_mutex_unlock(&mMutex);
705        return -EINVAL;
706    }
707    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
708    if (rc < 0) {
709        ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
710        pthread_mutex_unlock(&mMutex);
711        return -ENODEV;
712    }
713
714    pthread_mutex_unlock(&mMutex);
715    return NO_ERROR;
716}
717
718/*===========================================================================
719 * FUNCTION   : processCaptureRequest
720 *
721 * DESCRIPTION: process a capture request from camera service
722 *
723 * PARAMETERS :
724 *   @request : request from framework to process
725 *
726 * RETURN     :
727 *
728 *==========================================================================*/
729int QCamera3HardwareInterface::processCaptureRequest(
730                    camera3_capture_request_t *request)
731{
732    int rc = NO_ERROR;
733    int32_t request_id;
734    CameraMetadata meta;
735
736    pthread_mutex_lock(&mMutex);
737
738    rc = validateCaptureRequest(request);
739    if (rc != NO_ERROR) {
740        ALOGE("%s: incoming request is not valid", __func__);
741        pthread_mutex_unlock(&mMutex);
742        return rc;
743    }
744
745    uint32_t frameNumber = request->frame_number;
746
747    rc = setFrameParameters(request->frame_number, request->settings);
748    if (rc < 0) {
749        ALOGE("%s: fail to set frame parameters", __func__);
750        pthread_mutex_unlock(&mMutex);
751        return rc;
752    }
753
754    meta = request->settings;
755    if (meta.exists(ANDROID_REQUEST_ID)) {
756        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
757        mCurrentRequestId = request_id;
758        ALOGD("%s: Received request with id: %d",__func__, request_id);
759    } else if (mFirstRequest || mCurrentRequestId == -1){
760        ALOGE("%s: Unable to find request id field,"
761                " & no previous id available", __func__);
        pthread_mutex_unlock(&mMutex);
762        return NAME_NOT_FOUND;
763    } else {
764        ALOGD("%s: Re-using old request id", __func__);
765        request_id = mCurrentRequestId;
766    }
767
768
769    ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__,
770                                    request->num_output_buffers);
771    // Acquire all request buffers first
772    for (size_t i = 0; i < request->num_output_buffers; i++) {
773        const camera3_stream_buffer_t& output = request->output_buffers[i];
774        sp<Fence> acquireFence = new Fence(output.acquire_fence);
775
776        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
777        //Call function to store local copy of jpeg data for encode params.
778            rc = getJpegSettings(request->settings);
779            if (rc < 0) {
780                ALOGE("%s: failed to get jpeg parameters", __func__);
781                pthread_mutex_unlock(&mMutex);
782                return rc;
783            }
784        }
785
786        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
787        if (rc != OK) {
788            ALOGE("%s: fence wait failed %d", __func__, rc);
789            pthread_mutex_unlock(&mMutex);
790            return rc;
791        }
792    }
793
794    /* Update pending request list and pending buffers map */
795    pthread_mutex_lock(&mRequestLock);
796    PendingRequestInfo pendingRequest;
797    pendingRequest.frame_number = frameNumber;
798    pendingRequest.num_buffers = request->num_output_buffers;
799    pendingRequest.request_id = request_id;
800
801    for (size_t i = 0; i < request->num_output_buffers; i++) {
802        RequestedBufferInfo requestedBuf;
803        requestedBuf.stream = request->output_buffers[i].stream;
804        requestedBuf.buffer = NULL;
805        pendingRequest.buffers.push_back(requestedBuf);
806
807        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
808    }
809    mPendingRequestsList.push_back(pendingRequest);
810    pthread_mutex_unlock(&mRequestLock);
811
812    // Notify metadata channel we receive a request
813    mMetadataChannel->request(NULL, frameNumber);
814
815    // Call request on other streams
816    for (size_t i = 0; i < request->num_output_buffers; i++) {
817        const camera3_stream_buffer_t& output = request->output_buffers[i];
818        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
819
820        if (channel == NULL) {
821            ALOGE("%s: invalid channel pointer for stream", __func__);
822            continue;
823        }
824
825        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
826            rc = channel->request(output.buffer, frameNumber, mJpegSettings);
827        } else {
828            ALOGI("%s: %d, request with buffer %p, frame_number %d", __func__, __LINE__, output.buffer, frameNumber);
829            rc = channel->request(output.buffer, frameNumber);
830        }
831        if (rc < 0)
832            ALOGE("%s: request failed", __func__);
833    }
834
835    mFirstRequest = false;
836
837    //Block on conditional variable
838    pthread_mutex_lock(&mRequestLock);
839    mPendingRequest = 1;
840    while (mPendingRequest == 1) {
841        pthread_cond_wait(&mRequestCond, &mRequestLock);
842    }
843    pthread_mutex_unlock(&mRequestLock);
844
845    pthread_mutex_unlock(&mMutex);
846    return rc;
847}
848
849/*===========================================================================
850 * FUNCTION   : getMetadataVendorTagOps
851 *
852 * DESCRIPTION:
853 *
854 * PARAMETERS :
855 *
856 *
857 * RETURN     :
858 *==========================================================================*/
859void QCamera3HardwareInterface::getMetadataVendorTagOps(
860                    vendor_tag_query_ops_t* /*ops*/)
861{
862    /* Enable locks when we eventually add Vendor Tags */
863    /*
864    pthread_mutex_lock(&mMutex);
865
866    pthread_mutex_unlock(&mMutex);
867    */
868    return;
869}
870
871/*===========================================================================
872 * FUNCTION   : dump
873 *
874 * DESCRIPTION:
875 *
876 * PARAMETERS :
877 *
878 *
879 * RETURN     :
880 *==========================================================================*/
881void QCamera3HardwareInterface::dump(int /*fd*/)
882{
883    /*Enable lock when we implement this function*/
884    /*
885    pthread_mutex_lock(&mMutex);
886
887    pthread_mutex_unlock(&mMutex);
888    */
889    return;
890}
891
892/*===========================================================================
893 * FUNCTION   : captureResultCb
894 *
895 * DESCRIPTION: Callback handler for all capture result
896 *              (streams, as well as metadata)
897 *
898 * PARAMETERS :
899 *   @metadata_buf : metadata information; NULL if a stream buffer
900 *   @buffer       : actual gralloc buffer to be returned to frameworks;
901 *                   NULL if metadata.
 *   @frame_number   : frame number associated with the stream buffer
902 *
903 * RETURN     : NONE
904 *==========================================================================*/
905void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
906                camera3_stream_buffer_t *buffer, uint32_t frame_number)
907{
908    pthread_mutex_lock(&mRequestLock);
909
910    if (metadata_buf) {
911        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
912        int32_t frame_number_valid = *(int32_t *)
913            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
914        uint32_t frame_number = *(uint32_t *)
915            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
916        const struct timeval *tv = (const struct timeval *)
917            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
918        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
919            tv->tv_usec * NSEC_PER_USEC;
920
921        if (!frame_number_valid) {
922            ALOGD("%s: Not a valid frame number, used as SOF only", __func__);
923            mMetadataChannel->bufDone(metadata_buf);
924            goto done_metadata;
925        }
926        ALOGD("%s: valid frame_number = %d, capture_time = %lld", __func__,
927                frame_number, capture_time);
928
929        // Go through the pending requests info and send shutter/results to frameworks
930        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
931                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
932            camera3_capture_result_t result;
933            camera3_notify_msg_t notify_msg;
934            ALOGD("%s: frame_number in the list is %d", __func__, i->frame_number);
935
936            // Flush out all entries with less or equal frame numbers.
937
938            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
939            //Right now it's the same as metadata timestamp
940
941            //TODO: When there is metadata drop, how do we derive the timestamp of
942            //dropped frames? For now, we fake the dropped timestamp by subtracting
943            //from the reported timestamp
944            nsecs_t current_capture_time = capture_time -
945                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
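            // Example: if this metadata is for frame 10 with capture_time T and
            // frames 8 and 9 were dropped, frame 8 is reported as T - 2 * 33ms
            // and frame 9 as T - 33ms (assuming a ~30fps frame interval).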
946
947            // Send shutter notify to frameworks
948            notify_msg.type = CAMERA3_MSG_SHUTTER;
949            notify_msg.message.shutter.frame_number = i->frame_number;
950            notify_msg.message.shutter.timestamp = current_capture_time;
951            mCallbackOps->notify(mCallbackOps, &notify_msg);
952            ALOGD("%s: notify frame_number = %d, capture_time = %lld", __func__,
953                    i->frame_number, capture_time);
954
955            // Send empty metadata with already filled buffers for dropped metadata
956            // and send valid metadata with already filled buffers for current metadata
957            if (i->frame_number < frame_number) {
958                CameraMetadata emptyMetadata(1, 0);
959                emptyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
960                        &current_capture_time, 1);
961                emptyMetadata.update(ANDROID_REQUEST_ID,
962                        &(i->request_id), 1);
963                result.result = emptyMetadata.release();
964            } else {
965                result.result = translateCbMetadataToResultMetadata(metadata,
966                        current_capture_time, i->request_id);
967                // Return metadata buffer
968                mMetadataChannel->bufDone(metadata_buf);
969            }
970            if (!result.result) {
971                ALOGE("%s: metadata is NULL", __func__);
972            }
973            result.frame_number = i->frame_number;
974            result.num_output_buffers = 0;
975            result.output_buffers = NULL;
976            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
977                    j != i->buffers.end(); j++) {
978                if (j->buffer) {
979                    result.num_output_buffers++;
980                }
981            }
982
983            if (result.num_output_buffers > 0) {
984                camera3_stream_buffer_t *result_buffers =
985                    new camera3_stream_buffer_t[result.num_output_buffers];
986                if (!result_buffers) {
987                    ALOGE("%s: Fatal error: out of memory", __func__);
988                }
989                size_t result_buffers_idx = 0;
990                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
991                        j != i->buffers.end(); j++) {
992                    if (j->buffer) {
993                        result_buffers[result_buffers_idx++] = *(j->buffer);
994                        free(j->buffer);
995                        mPendingBuffersMap.editValueFor(j->stream)--;
996                    }
997                }
998                result.output_buffers = result_buffers;
999
1000                mCallbackOps->process_capture_result(mCallbackOps, &result);
1001                ALOGD("%s: meta frame_number = %d, capture_time = %lld",
1002                        __func__, result.frame_number, current_capture_time);
1003                free_camera_metadata((camera_metadata_t *)result.result);
1004                delete[] result_buffers;
1005            } else {
1006                mCallbackOps->process_capture_result(mCallbackOps, &result);
1007                ALOGD("%s: meta frame_number = %d, capture_time = %lld",
1008                        __func__, result.frame_number, current_capture_time);
1009                free_camera_metadata((camera_metadata_t *)result.result);
1010            }
1011            // erase the element from the list
1012            i = mPendingRequestsList.erase(i);
1013        }
1014
1015
1016done_metadata:
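        /* processCaptureRequest() is blocked on mRequestCond; only wake it when
         * no stream currently has all of its max_buffers outstanding, so a new
         * request cannot over-commit any stream. */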
1017        bool max_buffers_dequeued = false;
1018        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1019            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1020            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1021            if (queued_buffers == stream->max_buffers) {
1022                max_buffers_dequeued = true;
1023                break;
1024            }
1025        }
1026        if (!max_buffers_dequeued) {
1027            // Unblock process_capture_request
1028            mPendingRequest = 0;
1029            pthread_cond_signal(&mRequestCond);
1030        }
1031    } else {
1032        // If the frame number doesn't exist in the pending request list,
1033        // directly send the buffer to the frameworks, and update pending buffers map
1034        // Otherwise, book-keep the buffer.
1035        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1036        while (i != mPendingRequestsList.end() && i->frame_number != frame_number)
1037            i++;
1038        if (i == mPendingRequestsList.end()) {
1039            // Verify all pending requests frame_numbers are greater
1040            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1041                    j != mPendingRequestsList.end(); j++) {
1042                if (j->frame_number < frame_number) {
1043                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1044                            __func__, j->frame_number, frame_number);
1045                }
1046            }
1047            camera3_capture_result_t result;
1048            result.result = NULL;
1049            result.frame_number = frame_number;
1050            result.num_output_buffers = 1;
1051            result.output_buffers = buffer;
1052            ALOGD("%s: result frame_number = %d, buffer = %p",
1053                    __func__, frame_number, buffer);
1054            mPendingBuffersMap.editValueFor(buffer->stream)--;
1055            mCallbackOps->process_capture_result(mCallbackOps, &result);
1056        } else {
1057            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1058                    j != i->buffers.end(); j++) {
1059                if (j->stream == buffer->stream) {
1060                    if (j->buffer != NULL) {
1061                        ALOGE("%s: Error: buffer is already set", __func__);
1062                    } else {
1063                        j->buffer = (camera3_stream_buffer_t *)malloc(
1064                                sizeof(camera3_stream_buffer_t));
1065                        *(j->buffer) = *buffer;
1066                        ALOGD("%s: cache buffer %p at result frame_number %d",
1067                                __func__, buffer, frame_number);
1068                    }
1069                }
1070            }
1071        }
1072    }
1073
1074    pthread_mutex_unlock(&mRequestLock);
1075    return;
1076}
1077
1078/*===========================================================================
1079 * FUNCTION   : translateCbMetadataToResultMetadata
1080 *
1081 * DESCRIPTION:
1082 *
1083 * PARAMETERS :
1084 *   @metadata : metadata information from callback
1085 *
1086 * RETURN     : camera_metadata_t*
1087 *              metadata in a format specified by fwk
1088 *==========================================================================*/
1089camera_metadata_t*
1090QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1091                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1092                                 int32_t request_id)
1093{
1094    CameraMetadata camMetadata;
1095    camera_metadata_t* resultMetadata;
1096
1097
1098    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1099    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1100
1101    /*CAM_INTF_META_HISTOGRAM - TODO*/
1102    /*cam_hist_stats_t  *histogram =
1103      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1104      metadata);*/
1105
1106    /*face detection*/
1107    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1108        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1109    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1110    int32_t faceIds[numFaces];
1111    uint8_t faceScores[numFaces];
1112    int32_t faceRectangles[numFaces * 4];
1113    int32_t faceLandmarks[numFaces * 6];
1114    int j = 0, k = 0;
1115    for (int i = 0; i < numFaces; i++) {
1116        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1117        faceScores[i] = faceDetectionInfo->faces[i].score;
1118        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1119                faceRectangles+j, -1);
1120        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1121        j+= 4;
1122        k+= 6;
1123    }
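    // ANDROID_STATISTICS_FACE_LANDMARKS packs 6 ints per face:
    // left eye (x, y), right eye (x, y), mouth center (x, y).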
1124    camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1125    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1126    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1127            faceRectangles, numFaces*4);
1128    camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1129            faceLandmarks, numFaces*6);
1130
1131
1132    /*autofocus - TODO*/
1133    /*cam_auto_focus_data_t  *afData =(cam_auto_focus_data_t *)
1134      POINTER_OF(CAM_INTF_META_AUTOFOCUS_DATA,metadata);*/
1135
1136    uint8_t  *color_correct_mode =
1137        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1138    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1139
1140    int32_t  *ae_precapture_id =
1141        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1142    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1143
1144    /*aec regions*/
1145    cam_area_t  *hAeRegions =
1146        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1147    int32_t aeRegions[5];
1148    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1149    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1150
1151    uint8_t  *ae_state =
1152        (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1153    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1154
1155    uint8_t  *focusMode =
1156        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1157    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1158
1159    /*af regions*/
1160    cam_area_t  *hAfRegions =
1161        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1162    int32_t afRegions[5];
1163    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1164    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1165
1166    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1167    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1168
1169    int32_t  *afTriggerId =
1170        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1171    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1172
1173    uint8_t  *whiteBalance =
1174        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1175    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1176
1177    /*awb regions*/
1178    cam_area_t  *hAwbRegions =
1179        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1180    int32_t awbRegions[5];
1181    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1182    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1183
1184    uint8_t  *whiteBalanceState =
1185        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1186    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1187
1188    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1189    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1190
1191    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
1192    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1193
1194    uint8_t  *flashPower =
1195        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1196    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1197
1198    int64_t  *flashFiringTime =
1199        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1200    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1201
1202    /*int32_t  *ledMode =
1203      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1204      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1205
1206    uint8_t  *flashState =
1207        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1208    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1209
1210    uint8_t  *hotPixelMode =
1211        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1212    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1213
1214    float  *lensAperture =
1215        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1216    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1217
1218    float  *filterDensity =
1219        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1220    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1221
1222    float  *focalLength =
1223        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1224    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1225
1226    float  *focusDistance =
1227        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1228    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1229
1230    float  *focusRange =
1231        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1232    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1233
1234    uint8_t  *opticalStab =
1235        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1236    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1237
1238    /*int32_t  *focusState =
1239      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1240      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1241
1242    uint8_t  *noiseRedMode =
1243        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1244    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1245
1246    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1247
1248    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1249        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1250    int32_t scalerCropRegion[4];
1251    scalerCropRegion[0] = hScalerCropRegion->left;
1252    scalerCropRegion[1] = hScalerCropRegion->top;
1253    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
1254    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1255
1256    int64_t  *sensorExpTime =
1257        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1258    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1259
1260    int64_t  *sensorFrameDuration =
1261        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1262    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
1263
1264    int32_t  *sensorSensitivity =
1265        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1266    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1267
1268    uint8_t  *shadingMode =
1269        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1270    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1271
1272    uint8_t  *faceDetectMode =
1273        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1274    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
1275
1276    uint8_t  *histogramMode =
1277        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1278    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1279
1280    uint8_t  *sharpnessMapMode =
1281        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1282    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1283            sharpnessMapMode, 1);
1284
1285    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1286    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1287        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1288    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1289            (int32_t*)sharpnessMap->sharpness,
1290            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1291
1292    resultMetadata = camMetadata.release();
1293    return resultMetadata;
1294}
1295
1296/*===========================================================================
1297 * FUNCTION   : convertToRegions
1298 *
1299 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1300 *
1301 * PARAMETERS :
1302 *   @rect   : cam_rect_t struct to convert
1303 *   @region : int32_t destination array
1304 *   @weight : if we are converting from cam_area_t, weight is valid
1305 *             else weight = -1
1306 *
1307 *==========================================================================*/
1308void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1309    region[0] = rect.left;
1310    region[1] = rect.top;
1311    region[2] = rect.left + rect.width;
1312    region[3] = rect.top + rect.height;
1313    if (weight > -1) {
1314        region[4] = weight;
1315    }
1316}
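/* Example: a cam_rect_t of {left = 100, top = 200, width = 300, height = 400}
 * with weight 1 becomes the framework region array {100, 200, 400, 600, 1},
 * i.e. (xmin, ymin, xmax, ymax, weight). */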
1317
1318/*===========================================================================
1319 * FUNCTION   : convertFromRegions
1320 *
1321 * DESCRIPTION: helper method to convert the framework's int32_t region array
1322 *              (xmin, ymin, xmax, ymax, weight) into a cam_area_t
1323 *
1324 * PARAMETERS :
1325 *   @roi      : cam_area_t destination struct
1326 *   @settings : capture request settings containing the region tag
1327 *   @tag      : metadata tag to read (e.g. ANDROID_CONTROL_AF_REGIONS)
1328 *
1329 *==========================================================================*/
1330void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1331                                                   const camera_metadata_t *settings,
1332                                                   uint32_t tag){
1333    CameraMetadata frame_settings;
1334    frame_settings = settings;
1335    int32_t x_min = frame_settings.find(tag).data.i32[0];
1336    int32_t y_min = frame_settings.find(tag).data.i32[1];
1337    int32_t x_max = frame_settings.find(tag).data.i32[2];
1338    int32_t y_max = frame_settings.find(tag).data.i32[3];
1339    roi->weight = frame_settings.find(tag).data.i32[4];
1340    roi->rect.left = x_min;
1341    roi->rect.top = y_min;
1342    roi->rect.width = x_max - x_min;
1343    roi->rect.height = y_max - y_min;
1344}
1345
1346/*===========================================================================
1347 * FUNCTION   : convertLandmarks
1348 *
1349 * DESCRIPTION: helper method to extract the landmarks from face detection info
1350 *
1351 * PARAMETERS :
1352 *   @face   : cam_face_detection_info_t struct containing the detected face
1353 *   @landmarks : int32_t destination array
1354 *
1355 *
1356 *==========================================================================*/
1357void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1358{
1359    landmarks[0] = face.left_eye_center.x;
1360    landmarks[1] = face.left_eye_center.y;
1361    landmarks[2] = face.right_eye_center.x;
1362    landmarks[3] = face.right_eye_center.y;
1363    landmarks[4] = face.mouth_center.x;
1364    landmarks[5] = face.mouth_center.y;
1365}
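
/* Layout note (inferred from the code above): the landmarks array is packed as
 * [left_eye.x, left_eye.y, right_eye.x, right_eye.y, mouth.x, mouth.y], i.e.
 * three (x, y) pairs of int32_t per detected face. */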
1366
1367#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1368/*===========================================================================
1369 * FUNCTION   : initCapabilities
1370 *
1371 * DESCRIPTION: initialize camera capabilities in static data struct
1372 *
1373 * PARAMETERS :
1374 *   @cameraId  : camera Id
1375 *
1376 * RETURN     : int32_t type of status
1377 *              NO_ERROR  -- success
1378 *              non-zero failure code
1379 *==========================================================================*/
1380int QCamera3HardwareInterface::initCapabilities(int cameraId)
1381{
1382    int rc = 0;
1383    mm_camera_vtbl_t *cameraHandle = NULL;
1384    QCamera3HeapMemory *capabilityHeap = NULL;
1385
1386    cameraHandle = camera_open(cameraId);
1387    if (!cameraHandle) {
1388        ALOGE("%s: camera_open failed", __func__);
1389        rc = -1;
1390        goto open_failed;
1391    }
1392
1393    capabilityHeap = new QCamera3HeapMemory();
1394    if (capabilityHeap == NULL) {
1395        ALOGE("%s: creation of capabilityHeap failed", __func__);
1396        goto heap_creation_failed;
1397    }
1398    /* Allocate memory for capability buffer */
1399    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1400    if(rc != OK) {
1401        ALOGE("%s: No memory for cappability", __func__);
1402        goto allocate_failed;
1403    }
1404
1405    /* Map memory for capability buffer */
1406    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1407    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1408                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1409                                capabilityHeap->getFd(0),
1410                                sizeof(cam_capability_t));
1411    if(rc < 0) {
1412        ALOGE("%s: failed to map capability buffer", __func__);
1413        goto map_failed;
1414    }
1415
1416    /* Query Capability */
1417    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1418    if(rc < 0) {
1419        ALOGE("%s: failed to query capability",__func__);
1420        goto query_failed;
1421    }
1422    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1423    if (!gCamCapability[cameraId]) {
1424        ALOGE("%s: out of memory", __func__);
1425        goto query_failed;
1426    }
1427    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1428                                        sizeof(cam_capability_t));
1429    rc = 0;
1430
1431query_failed:
1432    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1433                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1434map_failed:
1435    capabilityHeap->deallocate();
1436allocate_failed:
1437    delete capabilityHeap;
1438heap_creation_failed:
1439    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1440    cameraHandle = NULL;
1441open_failed:
1442    return rc;
1443}
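
/* Cleanup note (summary of the labels above): the error labels unwind in
 * reverse order of acquisition, so a failure at any step releases only what
 * was already set up:
 *   query_failed          -> unmap the capability buffer
 *   map_failed            -> deallocate the capability heap
 *   allocate_failed       -> delete the capability heap object
 *   heap_creation_failed  -> close the camera handle
 *   open_failed           -> return rc
 * The success path falls through the same labels, which is why rc is reset to
 * 0 right before them. */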
1444
1445/*===========================================================================
1446 * FUNCTION   : initParameters
1447 *
1448 * DESCRIPTION: initialize camera parameters
1449 *
1450 * PARAMETERS :
1451 *
1452 * RETURN     : int32_t type of status
1453 *              NO_ERROR  -- success
1454 *              non-zero failure code
1455 *==========================================================================*/
1456int QCamera3HardwareInterface::initParameters()
1457{
1458    int rc = 0;
1459
1460    //Allocate Set Param Buffer
1461    mParamHeap = new QCamera3HeapMemory();
1462    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1463    if(rc != OK) {
1464        rc = NO_MEMORY;
1465        ALOGE("Failed to allocate SETPARM Heap memory");
1466        delete mParamHeap;
1467        mParamHeap = NULL;
1468        return rc;
1469    }
1470
1471    //Map memory for parameters buffer
1472    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1473            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1474            mParamHeap->getFd(0),
1475            sizeof(parm_buffer_t));
1476    if(rc < 0) {
1477        ALOGE("%s:failed to map SETPARM buffer",__func__);
1478        rc = FAILED_TRANSACTION;
1479        mParamHeap->deallocate();
1480        delete mParamHeap;
1481        mParamHeap = NULL;
1482        return rc;
1483    }
1484
1485    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1486    return rc;
1487}
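
/* Note (summary of the function above): the SETPARM heap is allocated once and
 * shared with the backend through map_buf() using its fd; mParameters then
 * aliases the same mapped memory via DATA_PTR, so entries written by
 * AddSetParmEntryToBatch() are visible to the backend without extra copies. */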
1488
1489/*===========================================================================
1490 * FUNCTION   : deinitParameters
1491 *
1492 * DESCRIPTION: de-initialize camera parameters
1493 *
1494 * PARAMETERS :
1495 *
1496 * RETURN     : NONE
1497 *==========================================================================*/
1498void QCamera3HardwareInterface::deinitParameters()
1499{
1500    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1501            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1502
1503    mParamHeap->deallocate();
1504    delete mParamHeap;
1505    mParamHeap = NULL;
1506
1507    mParameters = NULL;
1508}
1509
1510/*===========================================================================
1511 * FUNCTION   : calcMaxJpegSize
1512 *
1513 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1514 *
1515 * PARAMETERS :
1516 *
1517 * RETURN     : max_jpeg_size
1518 *==========================================================================*/
1519int QCamera3HardwareInterface::calcMaxJpegSize()
1520{
1521    int32_t max_jpeg_size = 0;
1522    int temp_width, temp_height;
1523    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1524        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1525        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1526        if (temp_width * temp_height > max_jpeg_size ) {
1527            max_jpeg_size = temp_width * temp_height;
1528        }
1529    }
1530    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1531    return max_jpeg_size;
1532}
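
/* Worked example (hypothetical 4000x3000 sensor as the largest picture size):
 * max_jpeg_size = 12000000 * 3/2 + sizeof(camera3_jpeg_blob_t), i.e. the
 * YUV420 footprint of the largest frame plus room for the JPEG blob trailer,
 * which comfortably bounds any compressed JPEG at that resolution. */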
1533
1534/*===========================================================================
1535 * FUNCTION   : initStaticMetadata
1536 *
1537 * DESCRIPTION: initialize the static metadata
1538 *
1539 * PARAMETERS :
1540 *   @cameraId  : camera Id
1541 *
1542 * RETURN     : int32_t type of status
1543 *              0  -- success
1544 *              non-zero failure code
1545 *==========================================================================*/
1546int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1547{
1548    int rc = 0;
1549    CameraMetadata staticInfo;
1550    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1551    /*HAL 3 only*/
1552    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1553                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1554
1555    /*hard coded for now but this should come from sensor*/
1556    float min_focus_distance;
1557    if(facingBack){
1558        min_focus_distance = 10;
1559    } else {
1560        min_focus_distance = 0;
1561    }
1562    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1563                    &min_focus_distance, 1);
1564
1565    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1566                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1567
1568    /*should be using focal lengths but sensor doesn't provide that info now*/
1569    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1570                      &gCamCapability[cameraId]->focal_length,
1571                      1);
1572
1573    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1574                      gCamCapability[cameraId]->apertures,
1575                      gCamCapability[cameraId]->apertures_count);
1576
1577    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1578                gCamCapability[cameraId]->filter_densities,
1579                gCamCapability[cameraId]->filter_densities_count);
1580
1581
1582    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1583                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1584                      gCamCapability[cameraId]->optical_stab_modes_count);
1585
1586    staticInfo.update(ANDROID_LENS_POSITION,
1587                      gCamCapability[cameraId]->lens_position,
1588                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1589
1590    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1591                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1592    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1593                      lens_shading_map_size,
1594                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1595
1596    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map,
1597            sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float));
1598
1599    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1600                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1601    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1602            geo_correction_map_size,
1603            sizeof(geo_correction_map_size)/sizeof(int32_t));
1604
1605    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1606                       gCamCapability[cameraId]->geo_correction_map,
1607                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1608
1609    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1610            gCamCapability[cameraId]->sensor_physical_size, 2);
1611
1612    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1613            gCamCapability[cameraId]->exposure_time_range, 2);
1614
1615    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1616            &gCamCapability[cameraId]->max_frame_duration, 1);
1617
1618
1619    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1620                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1621
1622    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1623                                               gCamCapability[cameraId]->pixel_array_size.height};
1624    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1625                      pixel_array_size, 2);
1626
1627    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.width,
1628                                                gCamCapability[cameraId]->active_array_size.height};
1629
1630    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1631                      active_array_size, 2);
1632
1633    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1634            &gCamCapability[cameraId]->white_level, 1);
1635
1636    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1637            gCamCapability[cameraId]->black_level_pattern, 4);
1638
1639    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1640                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1641
1642    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1643                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1644
1645    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1646                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1647    /*hardcode 0 for now*/
1648    int32_t max_face_count = 0;
1649    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1650                      &max_face_count, 1);
1651
1652    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1653                      &gCamCapability[cameraId]->histogram_size, 1);
1654
1655    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1656            &gCamCapability[cameraId]->max_histogram_count, 1);
1657
1658    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1659                                                gCamCapability[cameraId]->sharpness_map_size.height};
1660
1661    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1662            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1663
1664    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1665            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1666
1667
1668    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1669                      &gCamCapability[cameraId]->raw_min_duration,
1670                       1);
1671
1672    int32_t scalar_formats[CAM_FORMAT_MAX + 1]; /* +1 for the appended YCbCr_420_888 entry */
1673    int scalar_formats_count = gCamCapability[cameraId]->supported_scalar_format_cnt;
1674    for (int i = 0; i < scalar_formats_count; i++) {
1675        scalar_formats[i] = getScalarFormat(gCamCapability[cameraId]->supported_scalar_fmts[i]);
1676    }
1677    scalar_formats[scalar_formats_count] = HAL_PIXEL_FORMAT_YCbCr_420_888;
1678    scalar_formats_count++;
1679    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1680                      scalar_formats,
1681                      scalar_formats_count);
1682
1683    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1684    makeTable(gCamCapability[cameraId]->supported_sizes_tbl,
1685              gCamCapability[cameraId]->supported_sizes_tbl_cnt,
1686              available_processed_sizes);
1687    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1688                available_processed_sizes,
1689                (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2);
1690
1691    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1692    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1693                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1694                 available_fps_ranges);
1695    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1696            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1697
1698    camera_metadata_rational exposureCompensationStep = {
1699            gCamCapability[cameraId]->exp_compensation_step.numerator,
1700            gCamCapability[cameraId]->exp_compensation_step.denominator};
1701    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1702                      &exposureCompensationStep, 1);
1703
1704    /*TO DO*/
1705    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1706    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1707                      availableVstabModes, sizeof(availableVstabModes));
1708
1709    /*HAL 1 and HAL 3 common*/
1710    float maxZoom = 10;
1711    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1712            &maxZoom, 1);
1713
1714    int32_t max3aRegions = 1;
1715    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1716            &max3aRegions, 1);
1717
1718    uint8_t availableFaceDetectModes[] = {
1719            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1720    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1721                      availableFaceDetectModes,
1722                      sizeof(availableFaceDetectModes));
1723
1724    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1725                                       gCamCapability[cameraId]->raw_dim.height};
1726    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1727                      raw_size,
1728                      sizeof(raw_size)/sizeof(int32_t));
1729
1730    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1731                                                        gCamCapability[cameraId]->exposure_compensation_max};
1732    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1733            exposureCompensationRange,
1734            sizeof(exposureCompensationRange)/sizeof(int32_t));
1735
1736    uint8_t lensFacing = (facingBack) ?
1737            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1738    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1739
1740    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
1741    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1742              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1743              available_jpeg_sizes);
1744    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1745                available_jpeg_sizes,
1746                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1747
1748    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1749                      available_thumbnail_sizes,
1750                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1751
1752    int32_t max_jpeg_size = 0;
1753    int temp_width, temp_height;
1754    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1755        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1756        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1757        if (temp_width * temp_height > max_jpeg_size ) {
1758            max_jpeg_size = temp_width * temp_height;
1759        }
1760    }
1761    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1762    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1763                      &max_jpeg_size, 1);
1764
1765    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1766    int32_t size = 0;
1767    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1768        int val = lookupFwkName(EFFECT_MODES_MAP,
1769                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1770                                   gCamCapability[cameraId]->supported_effects[i]);
1771        if (val != NAME_NOT_FOUND) {
1772            avail_effects[size] = (uint8_t)val;
1773            size++;
1774        }
1775    }
1776    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1777                      avail_effects,
1778                      size);
1779
1780    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1781    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1782    int32_t supported_scene_modes_cnt = 0;
1783    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1784        int val = lookupFwkName(SCENE_MODES_MAP,
1785                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1786                                gCamCapability[cameraId]->supported_scene_modes[i]);
1787        if (val != NAME_NOT_FOUND) {
1788            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1789            supported_indexes[supported_scene_modes_cnt] = i;
1790            supported_scene_modes_cnt++;
1791        }
1792    }
1793
1794    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1795                      avail_scene_modes,
1796                      supported_scene_modes_cnt);
1797
1798    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1799    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1800                      supported_scene_modes_cnt,
1801                      scene_mode_overrides,
1802                      supported_indexes);
1803    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
1804                      scene_mode_overrides,
1805                      supported_scene_modes_cnt*3);
1806
1807    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
1808    size = 0;
1809    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
1810        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
1811                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
1812                                 gCamCapability[cameraId]->supported_antibandings[i]);
1813        if (val != NAME_NOT_FOUND) {
1814            avail_antibanding_modes[size] = (uint8_t)val;
1815            size++;
1816        }
1817
1818    }
1819    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1820                      avail_antibanding_modes,
1821                      size);
1822
1823    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
1824    size = 0;
1825    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
1826        int val = lookupFwkName(FOCUS_MODES_MAP,
1827                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
1828                                gCamCapability[cameraId]->supported_focus_modes[i]);
1829        if (val != NAME_NOT_FOUND) {
1830            avail_af_modes[size] = (uint8_t)val;
1831            size++;
1832        }
1833    }
1834    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1835                      avail_af_modes,
1836                      size);
1837
1838    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
1839    size = 0;
1840    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
1841        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1842                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1843                                    gCamCapability[cameraId]->supported_white_balances[i]);
1844        if (val != NAME_NOT_FOUND) {
1845            avail_awb_modes[size] = (uint8_t)val;
1846            size++;
1847        }
1848    }
1849    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1850                      avail_awb_modes,
1851                      size);
1852
1853    uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX];
1854    size = 0;
1855    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) {
1856        int val = lookupFwkName(FLASH_MODES_MAP,
1857                                sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
1858                                gCamCapability[cameraId]->supported_flash_modes[i]);
1859        if (val != NAME_NOT_FOUND) {
1860            avail_flash_modes[size] = (uint8_t)val;
1861            size++;
1862        }
1863    }
1864    uint8_t flashAvailable = 0;
1865    if (size > 1) {
1866        //flash is supported
1867        flashAvailable = 1;
1868    }
1869    staticInfo.update(ANDROID_FLASH_MODE,
1870                      avail_flash_modes,
1871                      size);
1872
1873    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
1874            &flashAvailable, 1);
1875
1876    uint8_t avail_ae_modes[5];
1877    size = 0;
1878    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
1879        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
1880        size++;
1881    }
1882    if (flashAvailable) {
1883        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
1884        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
1885        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
1886    }
1887    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1888                      avail_ae_modes,
1889                      size);
1890
1891    gStaticMetadata[cameraId] = staticInfo.release();
1892    return rc;
1893}
1894
1895/*===========================================================================
1896 * FUNCTION   : makeTable
1897 *
1898 * DESCRIPTION: make a table of sizes
1899 *
1900 * PARAMETERS :
1901 *
1902 *
1903 *==========================================================================*/
1904void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
1905                                          int32_t* sizeTable)
1906{
1907    int j = 0;
1908    for (int i = 0; i < size; i++) {
1909        sizeTable[j] = dimTable[i].width;
1910        sizeTable[j+1] = dimTable[i].height;
1911        j+=2;
1912    }
1913}
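
/* Example (illustrative input): a dimension table {{640,480},{1280,720}} is
 * flattened into {640, 480, 1280, 720}, the width/height-interleaved int32_t
 * layout expected by the ANDROID_SCALER_AVAILABLE_*_SIZES tags. */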
1914
1915/*===========================================================================
1916 * FUNCTION   : makeFPSTable
1917 *
1918 * DESCRIPTION: make a table of fps ranges
1919 *
1920 * PARAMETERS :
1921 *
1922 *==========================================================================*/
1923void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
1924                                          int32_t* fpsRangesTable)
1925{
1926    int j = 0;
1927    for (int i = 0; i < size; i++) {
1928        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
1929        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
1930        j+=2;
1931    }
1932}
1933
1934/*===========================================================================
1935 * FUNCTION   : makeOverridesList
1936 *
1937 * DESCRIPTION: make a list of scene mode overrides
1938 *
1939 * PARAMETERS :
1940 *
1941 *
1942 *==========================================================================*/
1943void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
1944                                                  uint8_t size, uint8_t* overridesList,
1945                                                  uint8_t* supported_indexes)
1946{
1947    /*daemon will give a list of overrides for all scene modes.
1948      However we should send the fwk only the overrides for the scene modes
1949      supported by the framework*/
1950    int j = 0, index = 0;
1951    for (int i = 0; i < size; i++) {
1952        index = supported_indexes[i];
1953        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
1954        overridesList[j+1] = (uint8_t)overridesTable[index].awb_mode;
1955        overridesList[j+2] = (uint8_t)overridesTable[index].af_mode;
1956        j+=3;
1957    }
1958}
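
/* Layout sketch: for each framework-supported scene mode i the list carries
 * three consecutive entries {ae_mode, awb_mode, af_mode}, taken from the
 * daemon's table at supported_indexes[i], so the output length is 3 * size and
 * matches the count passed with ANDROID_CONTROL_SCENE_MODE_OVERRIDES. */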
1959
1960/*===========================================================================
1961 * FUNCTION   : getScalarFormat
1962 *
1963 * DESCRIPTION: convert the backend format to a format recognized by the framework
1964 *
1965 * PARAMETERS : @format : the format from the backend
1966 *
1967 * RETURN     : format recognized by the framework
1968 *
1969 *==========================================================================*/
1970int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
1971{
1972    int32_t halPixelFormat;
1973
1974    switch (format) {
1975    case CAM_FORMAT_YUV_420_NV12:
1976        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
1977        break;
1978    case CAM_FORMAT_YUV_420_NV21:
1979        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
1980        break;
1981    case CAM_FORMAT_YUV_420_NV21_ADRENO:
1982        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
1983        break;
1984    case CAM_FORMAT_YUV_420_YV12:
1985        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
1986        break;
1987    case CAM_FORMAT_YUV_422_NV16:
1988    case CAM_FORMAT_YUV_422_NV61:
1989    default:
1990        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
1991        break;
1992    }
1993    return halPixelFormat;
1994}
1995
1996/*===========================================================================
1997 * FUNCTION   : AddSetParmEntryToBatch
1998 *
1999 * DESCRIPTION: add set parameter entry into batch
2000 *
2001 * PARAMETERS :
2002 *   @p_table     : ptr to parameter buffer
2003 *   @paramType   : parameter type
2004 *   @paramLength : length of parameter value
2005 *   @paramValue  : ptr to parameter value
2006 *
2007 * RETURN     : int32_t type of status
2008 *              NO_ERROR  -- success
2009 *              non-zero failure code
2010 *==========================================================================*/
2011int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2012                                                          cam_intf_parm_type_t paramType,
2013                                                          uint32_t paramLength,
2014                                                          void *paramValue)
2015{
2016    int position = paramType;
2017    int current, next;
2018
2019    /*************************************************************************
2020    *                 Code to take care of linking next flags                *
2021    *************************************************************************/
2022    current = GET_FIRST_PARAM_ID(p_table);
2023    if (position == current){
2024        //DO NOTHING
2025    } else if (position < current){
2026        SET_NEXT_PARAM_ID(position, p_table, current);
2027        SET_FIRST_PARAM_ID(p_table, position);
2028    } else {
2029        /* Search for the position in the linked list where we need to slot in*/
2030        while (position > GET_NEXT_PARAM_ID(current, p_table))
2031            current = GET_NEXT_PARAM_ID(current, p_table);
2032
2033        /*If node already exists no need to alter linking*/
2034        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2035            next = GET_NEXT_PARAM_ID(current, p_table);
2036            SET_NEXT_PARAM_ID(current, p_table, position);
2037            SET_NEXT_PARAM_ID(position, p_table, next);
2038        }
2039    }
2040
2041    /*************************************************************************
2042    *                   Copy contents into entry                             *
2043    *************************************************************************/
2044
2045    if (paramLength > sizeof(parm_type_t)) {
2046        ALOGE("%s:Size of input larger than max entry size",__func__);
2047        return BAD_VALUE;
2048    }
2049    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2050    return NO_ERROR;
2051}
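
/* Worked example (hypothetical parameter IDs): suppose the batch already links
 * first -> 3 -> 7 and position 5 is added. The walk stops at current = 3
 * because GET_NEXT_PARAM_ID(3) == 7 > 5, so the links become 3 -> 5 -> 7 and
 * the first entry is untouched. Adding position 1 instead takes the
 * "position < current" branch, giving 1 -> 3 -> 7 with SET_FIRST_PARAM_ID
 * pointing at 1. In both cases the value is then memcpy'd into
 * POINTER_OF(position, p_table). */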
2052
2053/*===========================================================================
2054 * FUNCTION   : lookupFwkName
2055 *
2056 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
2057 *              make sure the parameter is correctly propagated
2058 *
2059 * PARAMETERS  :
2060 *   @arr      : map between the two enums
2061 *   @len      : number of entries in the map
2062 *   @hal_name : hal parameter value to map
2063 *
2064 * RETURN     : int type of status
2065 *              fwk_name  -- success
2066 *              NAME_NOT_FOUND -- no matching framework value
2067 *==========================================================================*/
2068int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2069                                             int len, int hal_name)
2070{
2071
2072    for (int i = 0; i < len; i++) {
2073        if (arr[i].hal_name == hal_name)
2074            return arr[i].fwk_name;
2075    }
2076
2077    /* Not being able to find a matching framework type is not necessarily
2078     * an error. It happens when mm-camera supports more attributes
2079     * than the framework does */
2080    ALOGD("%s: Cannot find matching framework type", __func__);
2081    return NAME_NOT_FOUND;
2082}
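
/* Usage sketch (assuming the map pairs CAM_WB_MODE_FLUORESCENT with
 * ANDROID_CONTROL_AWB_MODE_FLUORESCENT):
 *
 *   int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
 *           sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
 *           CAM_WB_MODE_FLUORESCENT);
 *   if (val != NAME_NOT_FOUND) {
 *       // val == ANDROID_CONTROL_AWB_MODE_FLUORESCENT
 *   }
 *
 * NAME_NOT_FOUND simply means the backend supports a mode the framework has no
 * tag for, and the caller skips that entry. */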
2083
2084/*===========================================================================
2085 * FUNCTION   : lookupHalName
2086 *
2087 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
2088 *              make sure the parameter is correctly propagated
2089 *
2090 * PARAMETERS  :
2091 *   @arr      : map between the two enums
2092 *   @len      : number of entries in the map
2093 *   @fwk_name : framework parameter value to map
2094 *
2095 * RETURN     : int8_t type of status
2096 *              hal_name  -- success
2097 *              NAME_NOT_FOUND -- no matching hal value
2098 *==========================================================================*/
2099int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2100                                             int len, int fwk_name)
2101{
2102    for (int i = 0; i < len; i++) {
2103       if (arr[i].fwk_name == fwk_name)
2104           return arr[i].hal_name;
2105    }
2106    ALOGE("%s: Cannot find matching hal type", __func__);
2107    return NAME_NOT_FOUND;
2108}
2109
2110/*===========================================================================
2111 * FUNCTION   : getCamInfo
2112 *
2113 * DESCRIPTION: query camera capabilities
2114 *
2115 * PARAMETERS :
2116 *   @cameraId  : camera Id
2117 *   @info      : camera info struct to be filled in with camera capabilities
2118 *
2119 * RETURN     : int32_t type of status
2120 *              NO_ERROR  -- success
2121 *              non-zero failure code
2122 *==========================================================================*/
2123int QCamera3HardwareInterface::getCamInfo(int cameraId,
2124                                    struct camera_info *info)
2125{
2126    int rc = 0;
2127
2128    if (NULL == gCamCapability[cameraId]) {
2129        rc = initCapabilities(cameraId);
2130        if (rc < 0) {
2131            //pthread_mutex_unlock(&g_camlock);
2132            return rc;
2133        }
2134    }
2135
2136    if (NULL == gStaticMetadata[cameraId]) {
2137        rc = initStaticMetadata(cameraId);
2138        if (rc < 0) {
2139            return rc;
2140        }
2141    }
2142
2143    switch(gCamCapability[cameraId]->position) {
2144    case CAM_POSITION_BACK:
2145        info->facing = CAMERA_FACING_BACK;
2146        break;
2147
2148    case CAM_POSITION_FRONT:
2149        info->facing = CAMERA_FACING_FRONT;
2150        break;
2151
2152    default:
2153        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2154        rc = -1;
2155        break;
2156    }
2157
2158
2159    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2160    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2161    info->static_camera_characteristics = gStaticMetadata[cameraId];
2162
2163    return rc;
2164}
2165
2166/*===========================================================================
2167 * FUNCTION   : translateCapabilityToMetadata
2168 *
2169 * DESCRIPTION: translate camera capabilities into default request settings
2170 *              (camera_metadata_t) for the requested template
2171 *
2172 * PARAMETERS : @type : type of the request template
2172 *
2173 *
2174 * RETURN     : success: camera_metadata_t*
2175 *              failure: NULL
2176 *
2177 *==========================================================================*/
2178camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2179{
2180    pthread_mutex_lock(&mMutex);
2181
2182    if (mDefaultMetadata[type] != NULL) {
2183        pthread_mutex_unlock(&mMutex);
2184        return mDefaultMetadata[type];
2185    }
2186    //first time we are handling this request
2187    //fill up the metadata structure using the wrapper class
2188    CameraMetadata settings;
2189    //translate from cam_capability_t to camera_metadata_tag_t
2190    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2191    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2192
2193    /*control*/
2194
2195    uint8_t controlIntent = 0;
2196    switch (type) {
2197      case CAMERA3_TEMPLATE_PREVIEW:
2198        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2199        break;
2200      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2201        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2202        break;
2203      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2204        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2205        break;
2206      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2207        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2208        break;
2209      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2210        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2211        break;
2212      default:
2213        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2214        break;
2215    }
2216    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2217
2218    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2219            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2220
2221    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2222    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2223
2224    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2225    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2226
2227    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2228    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2229
2230    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2231    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2232
2233    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2234    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2235
2236    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2237    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2238
2239    static uint8_t focusMode;
2240    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2241        ALOGD("%s: Setting focus mode to auto", __func__);
2242        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2243    } else {
2244        ALOGD("%s: Setting focus mode to off", __func__);
2245        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2246    }
2247    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2248
2249    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2250    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2251
2252    /*flash*/
2253    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2254    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2255
2256
2257    /* lens */
2258    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2259    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2260
2261    if (gCamCapability[mCameraId]->filter_densities_count) {
2262        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2263        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2264                        gCamCapability[mCameraId]->filter_densities_count);
2265    }
2266
2267    /* TODO: Enable focus lengths once supported*/
2268    /*if (gCamCapability[mCameraId]->focal_lengths_count) {
2269        float default_focal_length = gCamCapability[mCameraId]->focal_lengths[0];
2270        settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2271    }*/
2272
2273    mDefaultMetadata[type] = settings.release();
2274
2275    pthread_mutex_unlock(&mMutex);
2276    return mDefaultMetadata[type];
2277}
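
/* Caching note (describing the code above): default request settings are built
 * lazily and memoized in mDefaultMetadata[type] under mMutex, so subsequent
 * calls for the same template return the cached camera_metadata_t without
 * rebuilding it from capabilities. */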
2278
2279/*===========================================================================
2280 * FUNCTION   : setFrameParameters
2281 *
2282 * DESCRIPTION: set parameters per frame as requested in the metadata from
2283 *              framework
2284 *
2285 * PARAMETERS :
2286 *   @frame_id  : frame number for this request
2287 *   @settings  : frame settings information from framework
2287 *
2288 *
2289 * RETURN     : success: NO_ERROR
2290 *              failure:
2291 *==========================================================================*/
2292int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2293                                                  const camera_metadata_t *settings)
2294{
2295    /*translate from camera_metadata_t type to parm_type_t*/
2296    int rc = 0;
2297    if (settings == NULL && mFirstRequest) {
2298        /*settings cannot be null for the first request*/
2299        return BAD_VALUE;
2300    }
2301
2302    int32_t hal_version = CAM_HAL_V3;
2303
2304    memset(mParameters, 0, sizeof(parm_buffer_t));
2305    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2306    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2307                sizeof(hal_version), &hal_version);
2308
2309    /*we need to update the frame number in the parameters*/
2310    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2311                                sizeof(frame_id), &frame_id);
2312    if (rc < 0) {
2313        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2314        return BAD_VALUE;
2315    }
2316
2317    if(settings != NULL){
2318        rc = translateMetadataToParameters(settings);
2319    }
2320    /*set the parameters to backend*/
2321    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2322    return rc;
2323}
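
/* Flow summary (of the function above): each request resets the parameter
 * batch, stamps the HAL version and frame number, folds in any per-frame
 * settings via translateMetadataToParameters(), and only then pushes the whole
 * batch to the backend with a single set_parms() call, so the backend sees a
 * self-consistent snapshot for the frame. */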
2324
2325/*===========================================================================
2326 * FUNCTION   : translateMetadataToParameters
2327 *
2328 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2329 *
2330 *
2331 * PARAMETERS :
2332 *   @settings  : frame settings information from framework
2333 *
2334 *
2335 * RETURN     : success: NO_ERROR
2336 *              failure:
2337 *==========================================================================*/
2338int QCamera3HardwareInterface::translateMetadataToParameters
2339                                  (const camera_metadata_t *settings)
2340{
2341    int rc = 0;
2342    CameraMetadata frame_settings;
2343    frame_settings = settings;
2344
2345
2346    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2347        int32_t antibandingMode =
2348            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2349        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2350                sizeof(antibandingMode), &antibandingMode);
2351    }
2352
2353    /*int32_t expCompensation = frame_settings.find().data.i32[0];
2354      rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2355      sizeof(expCompensation), &expCompensation);*/
2356    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2357        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2358        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2359                sizeof(aeLock), &aeLock);
2360    }
2361
2362    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2363        cam_fps_range_t fps_range;
2364        fps_range.min_fps =
2365            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2366        fps_range.max_fps =
2367            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2368        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2369                sizeof(fps_range), &fps_range);
2370    }
2371
2372    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2373        uint8_t fwk_focusMode =
2374            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2375        uint8_t focusMode = lookupHalName(FOCUS_MODES_MAP,
2376                                          sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2377                                          fwk_focusMode);
2378        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2379                sizeof(focusMode), &focusMode);
2380    }
2381
2382    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2383        uint8_t awbLock =
2384            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2385        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2386                sizeof(awbLock), &awbLock);
2387    }
2388
2389    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2390        uint8_t fwk_whiteLevel =
2391            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2392        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2393                sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2394                fwk_whiteLevel);
2395        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2396                sizeof(whiteLevel), &whiteLevel);
2397    }
2398
2399    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2400        uint8_t fwk_effectMode =
2401            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2402        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2403                sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2404                fwk_effectMode);
2405        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2406                sizeof(effectMode), &effectMode);
2407    }
2408
2409    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2410        uint8_t fwk_aeMode =
2411            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2412        uint8_t aeMode;
2413        int32_t redeye;
2414        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2415            aeMode = CAM_AE_MODE_OFF;
2416        } else {
2417            aeMode = CAM_AE_MODE_ON;
2418        }
2419        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2420            redeye = 1;
2421        } else {
2422            redeye = 0;
2423        }
2424        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2425                                          sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
2426                                          aeMode);
2427        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2428                sizeof(aeMode), &aeMode);
2429        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2430                sizeof(flashMode), &flashMode);
2431        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2432                sizeof(redeye), &redeye);
2433    }
2434
2435    if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) {
2436        int32_t metaFrameNumber =
2437            frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0];
2438        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2439                sizeof(metaFrameNumber), &metaFrameNumber);
2440    }
2441
2442    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2443        uint8_t colorCorrectMode =
2444            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2445        rc =
2446            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2447                    sizeof(colorCorrectMode), &colorCorrectMode);
2448    }
2449    cam_trigger_t aecTrigger;
2450    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2451    aecTrigger.trigger_id = -1;
2452    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2453        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2454        aecTrigger.trigger =
2455            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2456        aecTrigger.trigger_id =
2457            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2458    }
2459    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2460                                sizeof(aecTrigger), &aecTrigger);
2461
2462    /*af_trigger must come with a trigger id*/
2463    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2464        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2465        cam_trigger_t af_trigger;
2466        af_trigger.trigger =
2467            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2468        af_trigger.trigger_id =
2469            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2470        rc = AddSetParmEntryToBatch(mParameters,
2471                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2472    }
2473
2474    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2475        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2476        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2477                sizeof(metaMode), &metaMode);
2478    }
2479
2480    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2481        int32_t demosaic =
2482            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2483        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2484                sizeof(demosaic), &demosaic);
2485    }
2486
2487    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2488        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2489        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE,
2490                sizeof(edgeMode), &edgeMode);
2491    }
2492
2493    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2494        int32_t edgeStrength =
2495            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2496        rc = AddSetParmEntryToBatch(mParameters,
2497                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2498    }
2499
2500    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2501        uint8_t flashMode =
2502            frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2503        rc = AddSetParmEntryToBatch(mParameters,
2504                CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode);
2505    }
2506
2507    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2508        uint8_t flashPower =
2509            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2510        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2511                sizeof(flashPower), &flashPower);
2512    }
2513
2514    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2515        int64_t flashFiringTime =
2516            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2517        rc = AddSetParmEntryToBatch(mParameters,
2518                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2519    }
2520
2521    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2522        uint8_t geometricMode =
2523            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2524        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2525                sizeof(geometricMode), &geometricMode);
2526    }
2527
2528    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2529        uint8_t geometricStrength =
2530            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2531        rc = AddSetParmEntryToBatch(mParameters,
2532                CAM_INTF_META_GEOMETRIC_STRENGTH,
2533                sizeof(geometricStrength), &geometricStrength);
2534    }
2535
2536    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2537        uint8_t hotPixelMode =
2538            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2539        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2540                sizeof(hotPixelMode), &hotPixelMode);
2541    }
2542
2543    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2544        float lensAperture =
2545            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2546        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2547                sizeof(lensAperture), &lensAperture);
2548    }
2549
2550    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2551        float filterDensity =
2552            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2553        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2554                sizeof(filterDensity), &filterDensity);
2555    }
2556
2557    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2558        float focalLength =
2559            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2560        rc = AddSetParmEntryToBatch(mParameters,
2561                CAM_INTF_META_LENS_FOCAL_LENGTH,
2562                sizeof(focalLength), &focalLength);
2563    }
2564
2565    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2566        float focalDistance =
2567            frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2568        rc = AddSetParmEntryToBatch(mParameters,
2569                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2570                sizeof(focalDistance), &focalDistance);
2571    }
2572
2573    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2574        uint8_t optStabMode =
2575            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2576        rc = AddSetParmEntryToBatch(mParameters,
2577                CAM_INTF_META_LENS_OPT_STAB_MODE,
2578                sizeof(optStabMode), &optStabMode);
2579    }
2580
2581    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2582        uint8_t noiseRedMode =
2583            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2584        rc = AddSetParmEntryToBatch(mParameters,
2585                CAM_INTF_META_NOISE_REDUCTION_MODE,
2586                sizeof(noiseRedMode), &noiseRedMode);
2587    }
2588
2589    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2590        uint8_t noiseRedStrength =
2591            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2592        rc = AddSetParmEntryToBatch(mParameters,
2593                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2594                sizeof(noiseRedStrength), &noiseRedStrength);
2595    }
2596
2597    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2598        cam_crop_region_t scalerCropRegion;
2599        scalerCropRegion.left =
2600            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2601        scalerCropRegion.top =
2602            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2603        scalerCropRegion.width =
2604            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2605        scalerCropRegion.height =
2606            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2607        rc = AddSetParmEntryToBatch(mParameters,
2608                CAM_INTF_META_SCALER_CROP_REGION,
2609                sizeof(scalerCropRegion), &scalerCropRegion);
2610    }
2611
2612    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2613        int64_t sensorExpTime =
2614            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2615        rc = AddSetParmEntryToBatch(mParameters,
2616                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2617                sizeof(sensorExpTime), &sensorExpTime);
2618    }
2619
2620    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2621        int64_t sensorFrameDuration =
2622            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2623        rc = AddSetParmEntryToBatch(mParameters,
2624                CAM_INTF_META_SENSOR_FRAME_DURATION,
2625                sizeof(sensorFrameDuration), &sensorFrameDuration);
2626    }
2627
2628    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2629        int32_t sensorSensitivity =
2630            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2631        rc = AddSetParmEntryToBatch(mParameters,
2632                CAM_INTF_META_SENSOR_SENSITIVITY,
2633                sizeof(sensorSensitivity), &sensorSensitivity);
2634    }
2635
2636    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
2637        int32_t shadingMode =
2638            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
2639        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
2640                sizeof(shadingMode), &shadingMode);
2641    }
2642
2643    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
2644        uint8_t shadingStrength =
2645            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
2646        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
2647                sizeof(shadingStrength), &shadingStrength);
2648    }
2649
2650    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
2651        uint8_t facedetectMode =
2652            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
2653        rc = AddSetParmEntryToBatch(mParameters,
2654                CAM_INTF_META_STATS_FACEDETECT_MODE,
2655                sizeof(facedetectMode), &facedetectMode);
2656    }
2657
2658    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
2659        uint8_t histogramMode =
2660            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
2661        rc = AddSetParmEntryToBatch(mParameters,
2662                CAM_INTF_META_STATS_HISTOGRAM_MODE,
2663                sizeof(histogramMode), &histogramMode);
2664    }
2665
2666    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
2667        uint8_t sharpnessMapMode =
2668            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
2669        rc = AddSetParmEntryToBatch(mParameters,
2670                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
2671                sizeof(sharpnessMapMode), &sharpnessMapMode);
2672    }
2673
2674    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
2675        uint8_t tonemapMode =
2676            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
2677        rc = AddSetParmEntryToBatch(mParameters,
2678                CAM_INTF_META_TONEMAP_MODE,
2679                sizeof(tonemapMode), &tonemapMode);
2680    }
2681
2682    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2683        uint8_t captureIntent =
2684            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2685        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2686                sizeof(captureIntent), &captureIntent);
2687    }
2688
2689    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
2690        cam_area_t roi;
2691        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
2692        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
2693                sizeof(roi), &roi);
2694    }
2695
2696    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
2697        cam_area_t roi;
2698        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
2699        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
2700                sizeof(roi), &roi);
2701    }
2702
2703    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
2704        cam_area_t roi;
2705        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
2706        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
2707                sizeof(roi), &roi);
2708    }
2709    return rc;
2710}
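/*===========================================================================
 * The helper below is an illustrative sketch only and is not called by the
 * HAL: it shows how a framework-side caller might populate two of the tags
 * that the translation above consumes. The function name is hypothetical;
 * CameraMetadata::update()/release() are the real framework calls, and the
 * (left, top, width, height) ordering of ANDROID_SCALER_CROP_REGION matches
 * the indexing used when the tag is read back above.
 *==========================================================================*/
__attribute__((unused)) static camera_metadata_t* buildExampleFrameSettings()
{
    CameraMetadata settings;

    /* Crop region: left, top, width, height */
    int32_t cropRegion[4] = {0, 0, 1920, 1080};
    settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);

    /* Exposure time in nanoseconds (~33ms), read back as data.i64[0] above */
    int64_t exposureTimeNs = 33333333LL;
    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTimeNs, 1);

    /* Ownership of the underlying camera_metadata_t passes to the caller */
    return settings.release();
}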
2711
2712/*===========================================================================
2713 * FUNCTION   : getJpegSettings
2714 *
 * DESCRIPTION: save the JPEG settings from the frame settings in the HAL
 *
 * PARAMETERS :
 *   @settings  : frame settings information from framework
 *
 * RETURN     : success: NO_ERROR
 *              failure: NO_MEMORY if the settings buffer cannot be allocated
2724 *==========================================================================*/
2725int QCamera3HardwareInterface::getJpegSettings
2726                                  (const camera_metadata_t *settings)
2727{
    if (mJpegSettings) {
        free(mJpegSettings);
        mJpegSettings = NULL;
    }
    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
    if (mJpegSettings == NULL) {
        ALOGE("%s: Failed to allocate memory for jpeg settings", __func__);
        return NO_MEMORY;
    }
    CameraMetadata jpeg_settings;
    jpeg_settings = settings;
2735
2736    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
2737        mJpegSettings->jpeg_orientation =
2738            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
2739    } else {
2740        mJpegSettings->jpeg_orientation = 0;
2741    }
2742    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
2743        mJpegSettings->jpeg_quality =
2744            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
2745    } else {
2746        mJpegSettings->jpeg_quality = 85;
2747    }
2748    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
2749        mJpegSettings->thumbnail_size.width =
2750            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
2751        mJpegSettings->thumbnail_size.height =
2752            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
2753    } else {
2754        mJpegSettings->thumbnail_size.width = 0;
2755        mJpegSettings->thumbnail_size.height = 0;
2756    }
2757    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
2758        for (int i = 0; i < 3; i++) {
2759            mJpegSettings->gps_coordinates[i] =
2760                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
2761        }
2762    }
2763    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
2764        mJpegSettings->gps_timestamp =
2765            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
2766    }
2767
2768    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
2769        mJpegSettings->gps_processing_method =
2770            jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[0];
2771    }
2772    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2773        mJpegSettings->sensor_sensitivity =
2774            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2775    }
2776    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2777        mJpegSettings->lens_focal_length =
2778            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2779    }
2780    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
2781    return 0;
2782}
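/*===========================================================================
 * Illustrative sketch only, not called by the HAL: the framework-side layout
 * that getJpegSettings() above assumes. ANDROID_JPEG_GPS_COORDINATES is a
 * 3-element double array (latitude, longitude, altitude) and
 * ANDROID_JPEG_QUALITY is a single byte, which is why they are read back
 * with data.d[i] and data.u8[0] respectively. The function name is
 * hypothetical.
 *==========================================================================*/
__attribute__((unused)) static camera_metadata_t* buildExampleJpegSettings()
{
    CameraMetadata settings;

    uint8_t quality = 90;                      /* 1..100, read as data.u8[0] */
    settings.update(ANDROID_JPEG_QUALITY, &quality, 1);

    int32_t thumbnailSize[2] = {320, 240};     /* width, height */
    settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

    double gps[3] = {37.4220, -122.0841, 0.0}; /* latitude, longitude, altitude */
    settings.update(ANDROID_JPEG_GPS_COORDINATES, gps, 3);

    /* Ownership of the underlying camera_metadata_t passes to the caller */
    return settings.release();
}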
2783
2784/*===========================================================================
2785 * FUNCTION   : captureResultCb
2786 *
2787 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
2788 *
2789 * PARAMETERS :
2790 *   @frame  : frame information from mm-camera-interface
2791 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
2792 *   @userdata: userdata
2793 *
2794 * RETURN     : NONE
2795 *==========================================================================*/
2796void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
2797                camera3_stream_buffer_t *buffer,
2798                uint32_t frame_number, void *userdata)
2799{
2800    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
2801    if (hw == NULL) {
2802        ALOGE("%s: Invalid hw %p", __func__, hw);
2803        return;
2804    }
2805
2806    hw->captureResultCb(metadata, buffer, frame_number);
2807    return;
2808}
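/*===========================================================================
 * Illustrative sketch only, not called by the HAL: the shape of the result
 * that the instance-side captureResultCb() ultimately reports back to the
 * framework through camera3_callback_ops_t::process_capture_result(). The
 * function name is hypothetical; the camera3_capture_result_t layout comes
 * from hardware/camera3.h.
 *==========================================================================*/
__attribute__((unused)) static void exampleSendCaptureResult(
        const camera3_callback_ops_t *ops,
        uint32_t frame_number,
        const camera_metadata_t *metadata,
        const camera3_stream_buffer_t *buffer)
{
    camera3_capture_result_t result;
    result.frame_number = frame_number;
    result.result = metadata;             /* translated metadata, may be NULL */
    result.num_output_buffers = (buffer != NULL) ? 1 : 0;
    result.output_buffers = buffer;       /* filled gralloc buffer(s) */
    ops->process_capture_result(ops, &result);
}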
2809
2810/*===========================================================================
2811 * FUNCTION   : initialize
2812 *
2813 * DESCRIPTION: Pass framework callback pointers to HAL
2814 *
 * PARAMETERS :
 *   @device       : camera3 device handle
 *   @callback_ops : callback function pointers provided by the framework
 *
2818 * RETURN     : Success : 0
2819 *              Failure: -ENODEV
2820 *==========================================================================*/
2821
2822int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
2823                                  const camera3_callback_ops_t *callback_ops)
2824{
2825    ALOGV("%s: E", __func__);
2826    QCamera3HardwareInterface *hw =
2827        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2828    if (!hw) {
2829        ALOGE("%s: NULL camera device", __func__);
2830        return -ENODEV;
2831    }
2832
2833    int rc = hw->initialize(callback_ops);
2834    ALOGV("%s: X", __func__);
2835    return rc;
2836}
2837
2838/*===========================================================================
2839 * FUNCTION   : configure_streams
2840 *
 * DESCRIPTION: Static wrapper that forwards the requested stream
 *              configuration to the HAL instance
 *
 * PARAMETERS :
 *   @device      : camera3 device handle
 *   @stream_list : set of streams to be configured by the HAL
 *
2846 * RETURN     : Success: 0
2847 *              Failure: -EINVAL (if stream configuration is invalid)
2848 *                       -ENODEV (fatal error)
2849 *==========================================================================*/
2850
2851int QCamera3HardwareInterface::configure_streams(
2852        const struct camera3_device *device,
2853        camera3_stream_configuration_t *stream_list)
2854{
2855    ALOGV("%s: E", __func__);
2856    QCamera3HardwareInterface *hw =
2857        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2858    if (!hw) {
2859        ALOGE("%s: NULL camera device", __func__);
2860        return -ENODEV;
2861    }
2862    int rc = hw->configureStreams(stream_list);
2863    ALOGV("%s: X", __func__);
2864    return rc;
2865}
2866
2867/*===========================================================================
2868 * FUNCTION   : register_stream_buffers
2869 *
2870 * DESCRIPTION: Register stream buffers with the device
2871 *
 * PARAMETERS :
 *   @device     : camera3 device handle
 *   @buffer_set : buffers to be registered for a configured stream
 *
 * RETURN     : Success: 0
 *              Failure: negative error code
2875 *==========================================================================*/
2876int QCamera3HardwareInterface::register_stream_buffers(
2877        const struct camera3_device *device,
2878        const camera3_stream_buffer_set_t *buffer_set)
2879{
2880    ALOGV("%s: E", __func__);
2881    QCamera3HardwareInterface *hw =
2882        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2883    if (!hw) {
2884        ALOGE("%s: NULL camera device", __func__);
2885        return -ENODEV;
2886    }
2887    int rc = hw->registerStreamBuffers(buffer_set);
2888    ALOGV("%s: X", __func__);
2889    return rc;
2890}
2891
2892/*===========================================================================
2893 * FUNCTION   : construct_default_request_settings
2894 *
2895 * DESCRIPTION: Configure a settings buffer to meet the required use case
2896 *
 * PARAMETERS :
 *   @device : camera3 device handle
 *   @type   : request template type (CAMERA3_TEMPLATE_*)
 *
2900 * RETURN     : Success: Return valid metadata
2901 *              Failure: Return NULL
2902 *==========================================================================*/
2903const camera_metadata_t* QCamera3HardwareInterface::
2904    construct_default_request_settings(const struct camera3_device *device,
2905                                        int type)
2906{
2907
2908    ALOGV("%s: E", __func__);
2909    camera_metadata_t* fwk_metadata = NULL;
2910    QCamera3HardwareInterface *hw =
2911        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2912    if (!hw) {
2913        ALOGE("%s: NULL camera device", __func__);
2914        return NULL;
2915    }
2916
2917    fwk_metadata = hw->translateCapabilityToMetadata(type);
2918
2919    ALOGV("%s: X", __func__);
2920    return fwk_metadata;
2921}
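/*===========================================================================
 * Illustrative sketch only, not called by the HAL: how a client typically
 * obtains a default settings buffer through the camera3 ops table, which
 * routes to construct_default_request_settings() above. The function name
 * is hypothetical; CAMERA3_TEMPLATE_PREVIEW comes from hardware/camera3.h.
 *==========================================================================*/
__attribute__((unused)) static const camera_metadata_t* exampleGetPreviewDefaults(
        camera3_device_t *device)
{
    if (device == NULL || device->ops == NULL ||
            device->ops->construct_default_request_settings == NULL) {
        return NULL;
    }
    /* The returned buffer is owned by the HAL and remains valid until the
     * device is closed */
    return device->ops->construct_default_request_settings(device,
            CAMERA3_TEMPLATE_PREVIEW);
}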
2922
2923/*===========================================================================
2924 * FUNCTION   : process_capture_request
2925 *
 * DESCRIPTION: Static wrapper that forwards a capture request to the HAL
 *              instance for processing
 *
 * PARAMETERS :
 *   @device  : camera3 device handle
 *   @request : capture request to be processed
 *
 * RETURN     : Success: 0
 *              Failure: -EINVAL (NULL camera device or invalid request)
2932 *==========================================================================*/
2933int QCamera3HardwareInterface::process_capture_request(
2934                    const struct camera3_device *device,
2935                    camera3_capture_request_t *request)
2936{
2937    ALOGV("%s: E", __func__);
2938    QCamera3HardwareInterface *hw =
2939        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2940    if (!hw) {
2941        ALOGE("%s: NULL camera device", __func__);
2942        return -EINVAL;
2943    }
2944
2945    int rc = hw->processCaptureRequest(request);
2946    ALOGV("%s: X", __func__);
2947    return rc;
2948}
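/*===========================================================================
 * Illustrative sketch only, not called by the HAL: a minimal capture request
 * as a client would submit it through the camera3 ops table, which routes to
 * process_capture_request() above. The function name is hypothetical; the
 * stream, buffer handle and settings are assumed to come from earlier
 * configure_streams()/construct_default_request_settings() calls.
 *==========================================================================*/
__attribute__((unused)) static int exampleSubmitCaptureRequest(
        camera3_device_t *device,
        const camera_metadata_t *settings,
        camera3_stream_t *stream,
        buffer_handle_t *handle,
        uint32_t frame_number)
{
    camera3_stream_buffer_t outBuffer;
    outBuffer.stream = stream;
    outBuffer.buffer = handle;
    outBuffer.status = CAMERA3_BUFFER_STATUS_OK;
    outBuffer.acquire_fence = -1;          /* no acquire fence to wait on */
    outBuffer.release_fence = -1;

    camera3_capture_request_t request;
    request.frame_number = frame_number;
    request.settings = settings;           /* may be NULL to reuse the last settings */
    request.input_buffer = NULL;           /* no reprocess input */
    request.num_output_buffers = 1;
    request.output_buffers = &outBuffer;

    return device->ops->process_capture_request(device, &request);
}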
2949
2950/*===========================================================================
2951 * FUNCTION   : get_metadata_vendor_tag_ops
2952 *
 * DESCRIPTION: Static wrapper that fills in the vendor tag query ops for the
 *              HAL instance
 *
 * PARAMETERS :
 *   @device : camera3 device handle
 *   @ops    : vendor tag query ops structure to be filled in
 *
 * RETURN     : NONE
2959 *==========================================================================*/
2960
2961void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
2962                const struct camera3_device *device,
2963                vendor_tag_query_ops_t* ops)
2964{
2965    ALOGV("%s: E", __func__);
2966    QCamera3HardwareInterface *hw =
2967        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2968    if (!hw) {
2969        ALOGE("%s: NULL camera device", __func__);
2970        return;
2971    }
2972
2973    hw->getMetadataVendorTagOps(ops);
2974    ALOGV("%s: X", __func__);
2975    return;
2976}
2977
2978/*===========================================================================
2979 * FUNCTION   : dump
2980 *
 * DESCRIPTION: Static wrapper that dumps the HAL state to the given file
 *              descriptor
 *
 * PARAMETERS :
 *   @device : camera3 device handle
 *   @fd     : file descriptor to write the dump into
 *
 * RETURN     : NONE
2987 *==========================================================================*/
2988
2989void QCamera3HardwareInterface::dump(
2990                const struct camera3_device *device, int fd)
2991{
2992    ALOGV("%s: E", __func__);
2993    QCamera3HardwareInterface *hw =
2994        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2995    if (!hw) {
2996        ALOGE("%s: NULL camera device", __func__);
2997        return;
2998    }
2999
3000    hw->dump(fd);
3001    ALOGV("%s: X", __func__);
3002    return;
3003}
3004
3005/*===========================================================================
3006 * FUNCTION   : close_camera_device
3007 *
 * DESCRIPTION: Close the camera device and release the HAL instance
 *
 * PARAMETERS :
 *   @device : hw device handle of the camera to be closed
 *
 * RETURN     : Success: NO_ERROR
 *              Failure: BAD_VALUE (NULL camera device)
3014 *==========================================================================*/
3015int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3016{
3017    ALOGV("%s: E", __func__);
3018    int ret = NO_ERROR;
3019    QCamera3HardwareInterface *hw =
3020        reinterpret_cast<QCamera3HardwareInterface *>(
3021            reinterpret_cast<camera3_device_t *>(device)->priv);
3022    if (!hw) {
3023        ALOGE("NULL camera device");
3024        return BAD_VALUE;
3025    }
3026    delete hw;
3027    ALOGV("%s: X", __func__);
3028    return ret;
3029}
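/*===========================================================================
 * Illustrative sketch only, not called by the HAL: how a client tears the
 * device down once all in-flight requests have completed. close() is
 * reached through the common hw_device_t vtable, which this HAL points at
 * close_camera_device() above. The function name is hypothetical.
 *==========================================================================*/
__attribute__((unused)) static int exampleCloseCameraDevice(camera3_device_t *device)
{
    if (device == NULL || device->common.close == NULL) {
        return BAD_VALUE;
    }
    return device->common.close(&device->common);
}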
3030
3031}; //end namespace qcamera
3032