QCamera3HWI.cpp revision 71402bc67b7b2729cad7f970d976be15506cfdc7
1/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
49cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
50parm_buffer_t *prevSettings;
51const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
52
53const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
54    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
55    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
56    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
57    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
58    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
59    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
60    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
61    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
62    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
63};
64
65const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
66    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
67    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
68    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
69    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
70    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
71    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
72    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
73    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
74    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
75};
76
77const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
78    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
79    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
80    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
81    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
82    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
83    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
84    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
85    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
86    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
87    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
88    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
89    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
90    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
91    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
92    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
93};
94
95const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
96    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
97    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
98    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
99    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
100    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
101    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
102};
103
104const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
105    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
106    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
107    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
108    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
109};
110
111const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
112    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
113    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
114    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
115    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
116    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
117};
118
119const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
120    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
121    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON   },
122    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
123};
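/* The QCameraMap tables above pair a framework enum (fwk_name) with the matching
 * HAL enum (hal_name). A minimal sketch of how such a table is typically searched,
 * assuming QCameraMap holds a fwk_name/hal_name pair; lookupHalName() is an
 * illustrative helper name, not necessarily the helper this file defines:
 *
 *   static int32_t lookupHalName(const QCameraMap *arr, int len, int fwk_name)
 *   {
 *       for (int i = 0; i < len; i++) {
 *           if (arr[i].fwk_name == fwk_name)
 *               return arr[i].hal_name;   // e.g. CAM_WB_MODE_DAYLIGHT
 *       }
 *       return NAME_NOT_FOUND;            // no HAL equivalent for this enum
 *   }
 */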
124
125const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
126                                             320, 240, 176, 144, 0, 0};
127
128camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
129    initialize:                         QCamera3HardwareInterface::initialize,
130    configure_streams:                  QCamera3HardwareInterface::configure_streams,
131    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
132    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
133    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
134    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
135    dump:                               QCamera3HardwareInterface::dump,
136};
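/* The entries in mCameraOps are static trampolines. A sketch of the usual dispatch
 * pattern, assuming the wrappers forward through device->priv (the actual wrapper
 * bodies are defined elsewhere in this file):
 *
 *   int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
 *           const camera3_callback_ops_t *callback_ops)
 *   {
 *       QCamera3HardwareInterface *hw =
 *           reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
 *       if (!hw)
 *           return -ENODEV;
 *       return hw->initialize(callback_ops);   // forward to the member function
 *   }
 */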
137
138
139/*===========================================================================
140 * FUNCTION   : QCamera3HardwareInterface
141 *
142 * DESCRIPTION: constructor of QCamera3HardwareInterface
143 *
144 * PARAMETERS :
145 *   @cameraId  : camera ID
146 *
147 * RETURN     : none
148 *==========================================================================*/
149QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
150    : mCameraId(cameraId),
151      mCameraHandle(NULL),
152      mCameraOpened(false),
153      mCallbackOps(NULL),
154      mInputStream(NULL),
155      mMetadataChannel(NULL),
156      mFirstRequest(false),
157      mParamHeap(NULL),
158      mParameters(NULL),
159      mJpegSettings(NULL)
160{
161    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
162    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
163    mCameraDevice.common.close = close_camera_device;
164    mCameraDevice.ops = &mCameraOps;
165    mCameraDevice.priv = this;
166    gCamCapability[cameraId]->version = CAM_HAL_V3;
167
168    pthread_mutex_init(&mRequestLock, NULL);
169    pthread_cond_init(&mRequestCond, NULL);
170    mPendingRequest = 0;
171    mCurrentRequestId = -1;
172
173    pthread_mutex_init(&mMutex, NULL);
174    pthread_mutex_init(&mCaptureResultLock, NULL);
175
176    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
177        mDefaultMetadata[i] = NULL;
178}
179
180/*===========================================================================
181 * FUNCTION   : ~QCamera3HardwareInterface
182 *
183 * DESCRIPTION: destructor of QCamera3HardwareInterface
184 *
185 * PARAMETERS : none
186 *
187 * RETURN     : none
188 *==========================================================================*/
189QCamera3HardwareInterface::~QCamera3HardwareInterface()
190{
191    ALOGV("%s: E", __func__);
192    /* Clean up all channels */
193    if (mMetadataChannel) {
194        mMetadataChannel->stop();
195        delete mMetadataChannel;
196        mMetadataChannel = NULL;
197    }
198    /* We need to stop all streams before deleting any stream */
199    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
200        it != mStreamInfo.end(); it++) {
201        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
202        channel->stop();
203    }
204    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
205        it != mStreamInfo.end(); it++) {
206        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
207        delete channel;
208        free (*it);
209    }
210
211    if (mJpegSettings != NULL) {
212        free(mJpegSettings);
213        mJpegSettings = NULL;
214    }
215    deinitParameters();
216    closeCamera();
217
218    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
219        if (mDefaultMetadata[i])
220            free_camera_metadata(mDefaultMetadata[i]);
221
222    pthread_mutex_destroy(&mRequestLock);
223    pthread_cond_destroy(&mRequestCond);
224
225    pthread_mutex_destroy(&mMutex);
226    pthread_mutex_destroy(&mCaptureResultLock);
227    ALOGV("%s: X", __func__);
228}
229
230/*===========================================================================
231 * FUNCTION   : openCamera
232 *
233 * DESCRIPTION: open camera
234 *
235 * PARAMETERS :
236 *   @hw_device  : double ptr for camera device struct
237 *
238 * RETURN     : int32_t type of status
239 *              NO_ERROR  -- success
240 *              non-zero failure code
241 *==========================================================================*/
242int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
243{
244    //int rc = NO_ERROR;
245    int rc = 0;
246    if (mCameraOpened) {
247        *hw_device = NULL;
248        return PERMISSION_DENIED;
249    }
250
251    rc = openCamera();
252    if (rc == 0)
253        *hw_device = &mCameraDevice.common;
254    else
255        *hw_device = NULL;
256    return rc;
257}
258
259/*===========================================================================
260 * FUNCTION   : openCamera
261 *
262 * DESCRIPTION: open camera
263 *
264 * PARAMETERS : none
265 *
266 * RETURN     : int32_t type of status
267 *              NO_ERROR  -- success
268 *              non-zero failure code
269 *==========================================================================*/
270int QCamera3HardwareInterface::openCamera()
271{
272    if (mCameraHandle) {
273        ALOGE("Failure: Camera already opened");
274        return ALREADY_EXISTS;
275    }
276    mCameraHandle = camera_open(mCameraId);
277    if (!mCameraHandle) {
278        ALOGE("camera_open failed.");
279        return UNKNOWN_ERROR;
280    }
281
282    mCameraOpened = true;
283
284    return NO_ERROR;
285}
286
287/*===========================================================================
288 * FUNCTION   : closeCamera
289 *
290 * DESCRIPTION: close camera
291 *
292 * PARAMETERS : none
293 *
294 * RETURN     : int32_t type of status
295 *              NO_ERROR  -- success
296 *              non-zero failure code
297 *==========================================================================*/
298int QCamera3HardwareInterface::closeCamera()
299{
300    int rc = NO_ERROR;
301
302    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
303    mCameraHandle = NULL;
304    mCameraOpened = false;
305
306    return rc;
307}
308
309/*===========================================================================
310 * FUNCTION   : initialize
311 *
312 * DESCRIPTION: Initialize frameworks callback functions
313 *
314 * PARAMETERS :
315 *   @callback_ops : callback function to frameworks
316 *
317 * RETURN     :
318 *
319 *==========================================================================*/
320int QCamera3HardwareInterface::initialize(
321        const struct camera3_callback_ops *callback_ops)
322{
323    int rc;
324
325    pthread_mutex_lock(&mMutex);
326
327    rc = initParameters();
328    if (rc < 0) {
329        ALOGE("%s: initParameters failed %d", __func__, rc);
330        goto err1;
331    }
332    //Create metadata channel and initialize it
333    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
334                    mCameraHandle->ops, captureResultCb,
335                    &gCamCapability[mCameraId]->padding_info, this);
336    if (mMetadataChannel == NULL) {
337        ALOGE("%s: failed to allocate metadata channel", __func__);
338        rc = -ENOMEM;
339        goto err2;
340    }
341    rc = mMetadataChannel->initialize();
342    if (rc < 0) {
343        ALOGE("%s: metadata channel initialization failed", __func__);
344        goto err3;
345    }
346
347    mCallbackOps = callback_ops;
348
349    pthread_mutex_unlock(&mMutex);
350    return 0;
351
352err3:
353    delete mMetadataChannel;
354    mMetadataChannel = NULL;
355err2:
356    deinitParameters();
357err1:
358    pthread_mutex_unlock(&mMutex);
359    return rc;
360}
361
362/*===========================================================================
363 * FUNCTION   : configureStreams
364 *
365 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
366 *              and output streams.
367 *
368 * PARAMETERS :
369 *   @stream_list : streams to be configured
370 *
371 * RETURN     :
372 *
373 *==========================================================================*/
374int QCamera3HardwareInterface::configureStreams(
375        camera3_stream_configuration_t *streamList)
376{
377    int rc = 0;
378    pthread_mutex_lock(&mMutex);
379
380    // Sanity check stream_list
381    if (streamList == NULL) {
382        ALOGE("%s: NULL stream configuration", __func__);
383        pthread_mutex_unlock(&mMutex);
384        return BAD_VALUE;
385    }
386
387    if (streamList->streams == NULL) {
388        ALOGE("%s: NULL stream list", __func__);
389        pthread_mutex_unlock(&mMutex);
390        return BAD_VALUE;
391    }
392
393    if (streamList->num_streams < 1) {
394        ALOGE("%s: Bad number of streams requested: %d", __func__,
395                streamList->num_streams);
396        pthread_mutex_unlock(&mMutex);
397        return BAD_VALUE;
398    }
399
400    camera3_stream_t *inputStream = NULL;
401    /* first invalidate all the streams in the mStreamInfo list;
402     * if they appear again, they will be validated */
403    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
404            it != mStreamInfo.end(); it++) {
405        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
406        channel->stop();
407        (*it)->status = INVALID;
408    }
409
410    for (size_t i = 0; i < streamList->num_streams; i++) {
411        camera3_stream_t *newStream = streamList->streams[i];
412        ALOGV("%s: newStream type = %d, stream format = %d",
413                __func__, newStream->stream_type, newStream->format);
414        //if the stream is in the mStreamList validate it
415        bool stream_exists = false;
416        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
417                it != mStreamInfo.end(); it++) {
418            if ((*it)->stream == newStream) {
419                QCamera3Channel *channel =
420                    (QCamera3Channel*)(*it)->stream->priv;
421                stream_exists = true;
422                (*it)->status = RECONFIGURE;
423                /*delete the channel object associated with the stream because
424                  we need to reconfigure*/
425                delete channel;
426                (*it)->stream->priv = NULL;
427            }
428        }
429        if (!stream_exists) {
430            //new stream
431            stream_info_t* stream_info;
432            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
433            stream_info->stream = newStream;
434            stream_info->status = VALID;
435            stream_info->registered = 0;
436            mStreamInfo.push_back(stream_info);
437        }
438        if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
439            if (inputStream != NULL) {
440                ALOGE("%s: Multiple input streams requested!", __func__);
441                pthread_mutex_unlock(&mMutex);
442                return BAD_VALUE;
443            }
444            inputStream = newStream;
445        }
446    }
447    mInputStream = inputStream;
448
449    /*clean up invalid streams*/
450    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
451            it != mStreamInfo.end();) {
452        if(((*it)->status) == INVALID){
453            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
454            delete channel;
455            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
456            free(*it);
457            it = mStreamInfo.erase(it);
458        } else {
459            it++;
460        }
461    }
462
463    //mMetadataChannel->stop();
464
465    /* Allocate channel objects for the requested streams */
466    for (size_t i = 0; i < streamList->num_streams; i++) {
467        camera3_stream_t *newStream = streamList->streams[i];
468        if (newStream->priv == NULL) {
469            //New stream, construct channel
470            switch (newStream->stream_type) {
471            case CAMERA3_STREAM_INPUT:
472                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
473                break;
474            case CAMERA3_STREAM_BIDIRECTIONAL:
475                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
476                    GRALLOC_USAGE_HW_CAMERA_WRITE;
477                break;
478            case CAMERA3_STREAM_OUTPUT:
479                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
480                break;
481            default:
482                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
483                break;
484            }
485
486            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
487                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
488                QCamera3Channel *channel;
489                switch (newStream->format) {
490                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
491                case HAL_PIXEL_FORMAT_YCbCr_420_888:
492                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
493                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
494                            mCameraHandle->ops, captureResultCb,
495                            &gCamCapability[mCameraId]->padding_info, this, newStream);
496                    if (channel == NULL) {
497                        ALOGE("%s: allocation of channel failed", __func__);
498                        pthread_mutex_unlock(&mMutex);
499                        return -ENOMEM;
500                    }
501
502                    newStream->priv = channel;
503                    break;
504                case HAL_PIXEL_FORMAT_BLOB:
505                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
506                    channel = new QCamera3PicChannel(mCameraHandle->camera_handle,
507                            mCameraHandle->ops, captureResultCb,
508                            &gCamCapability[mCameraId]->padding_info, this, newStream);
509                    if (channel == NULL) {
510                        ALOGE("%s: allocation of channel failed", __func__);
511                        pthread_mutex_unlock(&mMutex);
512                        return -ENOMEM;
513                    }
514                    newStream->priv = channel;
515                    break;
516
517                //TODO: Add support for app consumed format?
518                default:
519                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
520                    break;
521                }
522            }
523        } else {
524            // Channel already exists for this stream
525            // Do nothing for now
526        }
527    }
528    /*For the streams to be reconfigured we need to register the buffers
529      since the framework won't*/
530    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
531            it != mStreamInfo.end(); it++) {
532        if ((*it)->status == RECONFIGURE) {
533            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
534            /*only register buffers for streams that have already been
535              registered*/
536            if ((*it)->registered) {
537                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
538                        (*it)->buffer_set.buffers);
539                if (rc != NO_ERROR) {
540                    ALOGE("%s: Failed to register the buffers of old stream,"
541                            " rc = %d", __func__, rc);
542                }
543                ALOGV("%s: channel %p has %d buffers",
544                        __func__, channel, (*it)->buffer_set.num_buffers);
545            }
546        }
547
548        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
549        if (index == NAME_NOT_FOUND) {
550            mPendingBuffersMap.add((*it)->stream, 0);
551        } else {
552            mPendingBuffersMap.editValueAt(index) = 0;
553        }
554    }
555
556    /* Initialize mPendingRequestsList and mPendingBuffersMap */
557    mPendingRequestsList.clear();
558
559    //settings/parameters don't carry over for new configureStreams
560    memset(mParameters, 0, sizeof(parm_buffer_t));
561    mFirstRequest = true;
562
563    pthread_mutex_unlock(&mMutex);
564    return rc;
565}
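/* For reference, a minimal, hypothetical stream configuration as a client might pass
 * to configure_streams() -- one preview-sized output stream; the size and format are
 * illustrative only:
 *
 *   camera3_stream_t preview = {};
 *   preview.stream_type = CAMERA3_STREAM_OUTPUT;
 *   preview.width       = 1280;
 *   preview.height      = 720;
 *   preview.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *
 *   camera3_stream_t *streams[] = { &preview };
 *   camera3_stream_configuration_t config = {};
 *   config.num_streams = 1;
 *   config.streams     = streams;
 *   // device->ops->configure_streams(device, &config);
 */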
566
567/*===========================================================================
568 * FUNCTION   : validateCaptureRequest
569 *
570 * DESCRIPTION: validate a capture request from camera service
571 *
572 * PARAMETERS :
573 *   @request : request from framework to process
574 *
575 * RETURN     :
576 *
577 *==========================================================================*/
578int QCamera3HardwareInterface::validateCaptureRequest(
579                    camera3_capture_request_t *request)
580{
581    ssize_t idx = 0;
582    const camera3_stream_buffer_t *b;
583    CameraMetadata meta;
584
585    /* Sanity check the request */
586    if (request == NULL) {
587        ALOGE("%s: NULL capture request", __func__);
588        return BAD_VALUE;
589    }
590
591    uint32_t frameNumber = request->frame_number;
592    if (request->input_buffer != NULL &&
593            request->input_buffer->stream != mInputStream) {
594        ALOGE("%s: Request %d: Input buffer not from input stream!",
595                __FUNCTION__, frameNumber);
596        return BAD_VALUE;
597    }
598    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
599        ALOGE("%s: Request %d: No output buffers provided!",
600                __FUNCTION__, frameNumber);
601        return BAD_VALUE;
602    }
603    if (request->input_buffer != NULL) {
604        //TODO
605        ALOGE("%s: Not supporting input buffer yet", __func__);
606        return BAD_VALUE;
607    }
608
609    // Validate all buffers
610    b = request->output_buffers;
611    do {
612        QCamera3Channel *channel =
613                static_cast<QCamera3Channel*>(b->stream->priv);
614        if (channel == NULL) {
615            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
616                    __func__, frameNumber, idx);
617            return BAD_VALUE;
618        }
619        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
620            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
621                    __func__, frameNumber, idx);
622            return BAD_VALUE;
623        }
624        if (b->release_fence != -1) {
625            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
626                    __func__, frameNumber, idx);
627            return BAD_VALUE;
628        }
629        if (b->buffer == NULL) {
630            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
631                    __func__, frameNumber, idx);
632            return BAD_VALUE;
633        }
634        idx++;
635        b = request->output_buffers + idx;
636    } while (idx < (ssize_t)request->num_output_buffers);
637
638    return NO_ERROR;
639}
640
641/*===========================================================================
642 * FUNCTION   : registerStreamBuffers
643 *
644 * DESCRIPTION: Register buffers for a given stream with the HAL device.
645 *
646 * PARAMETERS :
647 *   @stream_list : streams to be configured
648 *
649 * RETURN     :
650 *
651 *==========================================================================*/
652int QCamera3HardwareInterface::registerStreamBuffers(
653        const camera3_stream_buffer_set_t *buffer_set)
654{
655    int rc = 0;
656
657    pthread_mutex_lock(&mMutex);
658
659    if (buffer_set == NULL) {
660        ALOGE("%s: Invalid buffer_set parameter.", __func__);
661        pthread_mutex_unlock(&mMutex);
662        return -EINVAL;
663    }
664    if (buffer_set->stream == NULL) {
665        ALOGE("%s: Invalid stream parameter.", __func__);
666        pthread_mutex_unlock(&mMutex);
667        return -EINVAL;
668    }
669    if (buffer_set->num_buffers < 1) {
670        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
671        pthread_mutex_unlock(&mMutex);
672        return -EINVAL;
673    }
674    if (buffer_set->buffers == NULL) {
675        ALOGE("%s: Invalid buffers parameter.", __func__);
676        pthread_mutex_unlock(&mMutex);
677        return -EINVAL;
678    }
679
680    camera3_stream_t *stream = buffer_set->stream;
681    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
682
683    //set the buffer_set in the mStreamInfo array
684    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
685            it != mStreamInfo.end(); it++) {
686        if ((*it)->stream == stream) {
687            uint32_t numBuffers = buffer_set->num_buffers;
688            (*it)->buffer_set.stream = buffer_set->stream;
689            (*it)->buffer_set.num_buffers = numBuffers;
690            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
691            if ((*it)->buffer_set.buffers == NULL) {
692                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
693                pthread_mutex_unlock(&mMutex);
694                return -ENOMEM;
695            }
696            for (size_t j = 0; j < numBuffers; j++){
697                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
698            }
699            (*it)->registered = 1;
700        }
701    }
702
703    if (stream->stream_type != CAMERA3_STREAM_OUTPUT) {
704        ALOGE("%s: non-output stream types are not supported yet", __func__);
705        pthread_mutex_unlock(&mMutex);
706        return -EINVAL;
707    }
708    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
709    if (rc < 0) {
710        ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
711        pthread_mutex_unlock(&mMutex);
712        return -ENODEV;
713    }
714
715    pthread_mutex_unlock(&mMutex);
716    return NO_ERROR;
717}
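/* For reference, an illustrative buffer set as the framework would register for one
 * of the configured output streams (the handle count and the preview stream from the
 * configure_streams example above are hypothetical):
 *
 *   buffer_handle_t *handles[4];          // gralloc handles allocated by the client
 *   camera3_stream_buffer_set_t set = {};
 *   set.stream      = &preview;           // a stream already passed to configure_streams
 *   set.num_buffers = 4;
 *   set.buffers     = handles;
 *   // device->ops->register_stream_buffers(device, &set);
 */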
718
719/*===========================================================================
720 * FUNCTION   : processCaptureRequest
721 *
722 * DESCRIPTION: process a capture request from camera service
723 *
724 * PARAMETERS :
725 *   @request : request from framework to process
726 *
727 * RETURN     :
728 *
729 *==========================================================================*/
730int QCamera3HardwareInterface::processCaptureRequest(
731                    camera3_capture_request_t *request)
732{
733    int rc = NO_ERROR;
734    int32_t request_id;
735    CameraMetadata meta;
736
737    pthread_mutex_lock(&mMutex);
738
739    rc = validateCaptureRequest(request);
740    if (rc != NO_ERROR) {
741        ALOGE("%s: incoming request is not valid", __func__);
742        pthread_mutex_unlock(&mMutex);
743        return rc;
744    }
745
746    uint32_t frameNumber = request->frame_number;
747
748    rc = setFrameParameters(request->frame_number, request->settings);
749    if (rc < 0) {
750        ALOGE("%s: fail to set frame parameters", __func__);
751        pthread_mutex_unlock(&mMutex);
752        return rc;
753    }
754
755    meta = request->settings;
756    if (meta.exists(ANDROID_REQUEST_ID)) {
757        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
758        mCurrentRequestId = request_id;
759        ALOGV("%s: Received request with id: %d",__func__, request_id);
760    } else if (mFirstRequest || mCurrentRequestId == -1){
761        ALOGE("%s: Unable to find request id field,"
762                " & no previous id available", __func__);
           pthread_mutex_unlock(&mMutex);
763        return NAME_NOT_FOUND;
764    } else {
765        ALOGV("%s: Re-using old request id", __func__);
766        request_id = mCurrentRequestId;
767    }
768
769
770    ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__,
771                                    request->num_output_buffers);
772    // Acquire all request buffers first
773    for (size_t i = 0; i < request->num_output_buffers; i++) {
774        const camera3_stream_buffer_t& output = request->output_buffers[i];
775        sp<Fence> acquireFence = new Fence(output.acquire_fence);
776
777        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
778        //Call function to store local copy of jpeg data for encode params.
779            rc = getJpegSettings(request->settings);
780            if (rc < 0) {
781                ALOGE("%s: failed to get jpeg parameters", __func__);
782                pthread_mutex_unlock(&mMutex);
783                return rc;
784            }
785        }
786
787        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
788        if (rc != OK) {
789            ALOGE("%s: fence wait failed %d", __func__, rc);
790            pthread_mutex_unlock(&mMutex);
791            return rc;
792        }
793    }
794
795    /* Update pending request list and pending buffers map */
796    pthread_mutex_lock(&mRequestLock);
797    PendingRequestInfo pendingRequest;
798    pendingRequest.frame_number = frameNumber;
799    pendingRequest.num_buffers = request->num_output_buffers;
800    pendingRequest.request_id = request_id;
801
802    for (size_t i = 0; i < request->num_output_buffers; i++) {
803        RequestedBufferInfo requestedBuf;
804        requestedBuf.stream = request->output_buffers[i].stream;
805        requestedBuf.buffer = NULL;
806        pendingRequest.buffers.push_back(requestedBuf);
807
808        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
809    }
810    mPendingRequestsList.push_back(pendingRequest);
811    pthread_mutex_unlock(&mRequestLock);
812
813    // Notify metadata channel we receive a request
814    mMetadataChannel->request(NULL, frameNumber);
815
816    // Call request on other streams
817    for (size_t i = 0; i < request->num_output_buffers; i++) {
818        const camera3_stream_buffer_t& output = request->output_buffers[i];
819        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
820
821        if (channel == NULL) {
822            ALOGE("%s: invalid channel pointer for stream", __func__);
823            continue;
824        }
825
826        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
827            rc = channel->request(output.buffer, frameNumber, mJpegSettings);
828        } else {
829            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
830                __LINE__, output.buffer, frameNumber);
831            rc = channel->request(output.buffer, frameNumber);
832        }
833        if (rc < 0)
834            ALOGE("%s: request failed", __func__);
835    }
836
837    mFirstRequest = false;
838
839    //Block on conditional variable
840    pthread_mutex_lock(&mRequestLock);
841    mPendingRequest = 1;
842    while (mPendingRequest == 1) {
843        pthread_cond_wait(&mRequestCond, &mRequestLock);
844    }
845    pthread_mutex_unlock(&mRequestLock);
846
847    pthread_mutex_unlock(&mMutex);
848    return rc;
849}
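/* Note on pacing: processCaptureRequest() deliberately blocks on mRequestCond after
 * issuing the request. captureResultCb() clears mPendingRequest and signals the
 * condition once no stream is holding its full max_buffers quota, so in this revision
 * the framework is throttled to one outstanding request at a time. */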
850
851/*===========================================================================
852 * FUNCTION   : getMetadataVendorTagOps
853 *
854 * DESCRIPTION:
855 *
856 * PARAMETERS :
857 *
858 *
859 * RETURN     :
860 *==========================================================================*/
861void QCamera3HardwareInterface::getMetadataVendorTagOps(
862                    vendor_tag_query_ops_t* /*ops*/)
863{
864    /* Enable locks when we eventually add Vendor Tags */
865    /*
866    pthread_mutex_lock(&mMutex);
867
868    pthread_mutex_unlock(&mMutex);
869    */
870    return;
871}
872
873/*===========================================================================
874 * FUNCTION   : dump
875 *
876 * DESCRIPTION:
877 *
878 * PARAMETERS :
879 *
880 *
881 * RETURN     :
882 *==========================================================================*/
883void QCamera3HardwareInterface::dump(int /*fd*/)
884{
885    /*Enable lock when we implement this function*/
886    /*
887    pthread_mutex_lock(&mMutex);
888
889    pthread_mutex_unlock(&mMutex);
890    */
891    return;
892}
893
894/*===========================================================================
895 * FUNCTION   : captureResultCb
896 *
897 * DESCRIPTION: Callback handler for all capture result
898 *              (streams, as well as metadata)
899 *
900 * PARAMETERS :
901 *   @metadata : metadata information
902 *   @buffer   : actual gralloc buffer to be returned to frameworks.
903 *               NULL if metadata.
904 *
905 * RETURN     : NONE
906 *==========================================================================*/
907void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
908                camera3_stream_buffer_t *buffer, uint32_t frame_number)
909{
910    pthread_mutex_lock(&mRequestLock);
911
912    if (metadata_buf) {
913        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
914        int32_t frame_number_valid = *(int32_t *)
915            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
916        uint32_t frame_number = *(uint32_t *)
917            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
918        const struct timeval *tv = (const struct timeval *)
919            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
920        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
921            tv->tv_usec * NSEC_PER_USEC;
922
923        if (!frame_number_valid) {
924            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
925            mMetadataChannel->bufDone(metadata_buf);
926            goto done_metadata;
927        }
928        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
929                frame_number, capture_time);
930
931        // Go through the pending requests info and send shutter/results to frameworks
932        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
933                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
934            camera3_capture_result_t result;
935            camera3_notify_msg_t notify_msg;
936            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
937
938            // Flush out all entries with less or equal frame numbers.
939
940            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
941            //Right now it's the same as metadata timestamp
942
943            //TODO: When there is metadata drop, how do we derive the timestamp of
944            //dropped frames? For now, we fake the dropped timestamp by substracting
945            //dropped frames? For now, we fake the dropped timestamp by subtracting
946            nsecs_t current_capture_time = capture_time -
947                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
948
949            // Send shutter notify to frameworks
950            notify_msg.type = CAMERA3_MSG_SHUTTER;
951            notify_msg.message.shutter.frame_number = i->frame_number;
952            notify_msg.message.shutter.timestamp = current_capture_time;
953            mCallbackOps->notify(mCallbackOps, &notify_msg);
954            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
955                    i->frame_number, capture_time);
956
957            // Send empty metadata with already filled buffers for dropped metadata
958            // and send valid metadata with already filled buffers for current metadata
959            if (i->frame_number < frame_number) {
960                CameraMetadata dummyMetadata;
961                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
962                        &current_capture_time, 1);
963                dummyMetadata.update(ANDROID_REQUEST_ID,
964                        &(i->request_id), 1);
965                result.result = dummyMetadata.release();
966            } else {
967                result.result = translateCbMetadataToResultMetadata(metadata,
968                        current_capture_time, i->request_id);
969                // Return metadata buffer
970                mMetadataChannel->bufDone(metadata_buf);
971            }
972            if (!result.result) {
973                ALOGE("%s: metadata is NULL", __func__);
974            }
975            result.frame_number = i->frame_number;
976            result.num_output_buffers = 0;
977            result.output_buffers = NULL;
978            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
979                    j != i->buffers.end(); j++) {
980                if (j->buffer) {
981                    result.num_output_buffers++;
982                }
983            }
984
985            if (result.num_output_buffers > 0) {
986                camera3_stream_buffer_t *result_buffers =
987                    new camera3_stream_buffer_t[result.num_output_buffers];
988                if (!result_buffers) {
989                    ALOGE("%s: Fatal error: out of memory", __func__);
990                }
991                size_t result_buffers_idx = 0;
992                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
993                        j != i->buffers.end(); j++) {
994                    if (j->buffer) {
995                        result_buffers[result_buffers_idx++] = *(j->buffer);
996                        free(j->buffer);
997                        mPendingBuffersMap.editValueFor(j->stream)--;
998                    }
999                }
1000                result.output_buffers = result_buffers;
1001
1002                mCallbackOps->process_capture_result(mCallbackOps, &result);
1003                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1004                        __func__, result.frame_number, current_capture_time);
1005                free_camera_metadata((camera_metadata_t *)result.result);
1006                delete[] result_buffers;
1007            } else {
1008                mCallbackOps->process_capture_result(mCallbackOps, &result);
1009                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1010                        __func__, result.frame_number, current_capture_time);
1011                free_camera_metadata((camera_metadata_t *)result.result);
1012            }
1013            // erase the element from the list
1014            i = mPendingRequestsList.erase(i);
1015        }
1016
1017
1018done_metadata:
1019        bool max_buffers_dequeued = false;
1020        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1021            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1022            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1023            if (queued_buffers == stream->max_buffers) {
1024                max_buffers_dequeued = true;
1025                break;
1026            }
1027        }
1028        if (!max_buffers_dequeued) {
1029            // Unblock process_capture_request
1030            mPendingRequest = 0;
1031            pthread_cond_signal(&mRequestCond);
1032        }
1033    } else {
1034        // If the frame number doesn't exist in the pending request list,
1035        // directly send the buffer to the frameworks, and update pending buffers map
1036        // Otherwise, book-keep the buffer.
1037        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1038        while (i != mPendingRequestsList.end() && i->frame_number != frame_number)
1039            i++;
1040        if (i == mPendingRequestsList.end()) {
1041            // Verify all pending requests frame_numbers are greater
1042            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1043                    j != mPendingRequestsList.end(); j++) {
1044                if (j->frame_number < frame_number) {
1045                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1046                            __func__, j->frame_number, frame_number);
1047                }
1048            }
1049            camera3_capture_result_t result;
1050            result.result = NULL;
1051            result.frame_number = frame_number;
1052            result.num_output_buffers = 1;
1053            result.output_buffers = buffer;
1054            ALOGV("%s: result frame_number = %d, buffer = %p",
1055                    __func__, frame_number, buffer);
1056            mPendingBuffersMap.editValueFor(buffer->stream)--;
1057            mCallbackOps->process_capture_result(mCallbackOps, &result);
1058        } else {
1059            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1060                    j != i->buffers.end(); j++) {
1061                if (j->stream == buffer->stream) {
1062                    if (j->buffer != NULL) {
1063                        ALOGE("%s: Error: buffer is already set", __func__);
1064                    } else {
1065                        j->buffer = (camera3_stream_buffer_t *)malloc(
1066                                sizeof(camera3_stream_buffer_t));
1067                        *(j->buffer) = *buffer;
1068                        ALOGV("%s: cache buffer %p at result frame_number %d",
1069                                __func__, buffer, frame_number);
1070                    }
1071                }
1072            }
1073        }
1074    }
1075
1076    pthread_mutex_unlock(&mRequestLock);
1077    return;
1078}
1079
1080/*===========================================================================
1081 * FUNCTION   : translateCbMetadataToResultMetadata
1082 *
1083 * DESCRIPTION:
1084 *
1085 * PARAMETERS :
1086 *   @metadata : metadata information from callback
1087 *
1088 * RETURN     : camera_metadata_t*
1089 *              metadata in a format specified by fwk
1090 *==========================================================================*/
1091camera_metadata_t*
1092QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1093                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1094                                 int32_t request_id)
1095{
1096    CameraMetadata camMetadata;
1097    camera_metadata_t* resultMetadata;
1098
1099
1100    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1101    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1102
1103    /*CAM_INTF_META_HISTOGRAM - TODO*/
1104    /*cam_hist_stats_t  *histogram =
1105      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1106      metadata);*/
1107
1108    /*face detection*/
1109    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1110        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1111    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1112    int32_t faceIds[numFaces];
1113    uint8_t faceScores[numFaces];
1114    int32_t faceRectangles[numFaces * 4];
1115    int32_t faceLandmarks[numFaces * 6];
1116    int j = 0, k = 0;
1117    for (int i = 0; i < numFaces; i++) {
1118        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1119        faceScores[i] = faceDetectionInfo->faces[i].score;
1120        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1121                faceRectangles+j, -1);
1122        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1123        j+= 4;
1124        k+= 6;
1125    }
1126    if (numFaces > 0) {
1127        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1128        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1129        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1130            faceRectangles, numFaces*4);
1131        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1132            faceLandmarks, numFaces*6);
1133    }
1134
1135    uint8_t  *color_correct_mode =
1136        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1137    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1138
1139    int32_t  *ae_precapture_id =
1140        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1141    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1142
1143    /*aec regions*/
1144    cam_area_t  *hAeRegions =
1145        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1146    int32_t aeRegions[5];
1147    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1148    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1149
1150    uint8_t  *ae_state =
1151        (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1152    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1153
1154    uint8_t  *focusMode =
1155        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1156    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1157
1158    /*af regions*/
1159    cam_area_t  *hAfRegions =
1160        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1161    int32_t afRegions[5];
1162    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1163    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1164
1165    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1166    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1167
1168    int32_t  *afTriggerId =
1169        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1170    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1171
1172    uint8_t  *whiteBalance =
1173        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1174    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1175
1176    /*awb regions*/
1177    cam_area_t  *hAwbRegions =
1178        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1179    int32_t awbRegions[5];
1180    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1181    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1182
1183    uint8_t  *whiteBalanceState =
1184        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1185    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1186
1187    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1188    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1189
1190    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
1191    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1192
1193    uint8_t  *flashPower =
1194        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1195    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1196
1197    int64_t  *flashFiringTime =
1198        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1199    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1200
1201    /*int32_t  *ledMode =
1202      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1203      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1204
1205    uint8_t  *flashState =
1206        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1207    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1208
1209    uint8_t  *hotPixelMode =
1210        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1211    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1212
1213    float  *lensAperture =
1214        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1215    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1216
1217    float  *filterDensity =
1218        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1219    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1220
1221    float  *focalLength =
1222        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1223    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1224
1225    float  *focusDistance =
1226        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1227    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1228
1229    float  *focusRange =
1230        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1231    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1232
1233    uint8_t  *opticalStab =
1234        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1235    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1236
1237    /*int32_t  *focusState =
1238      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1239      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1240
1241    uint8_t  *noiseRedMode =
1242        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1243    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1244
1245    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1246
1247    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1248        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1249    int32_t scalerCropRegion[4];
1250    scalerCropRegion[0] = hScalerCropRegion->left;
1251    scalerCropRegion[1] = hScalerCropRegion->top;
1252    scalerCropRegion[2] = hScalerCropRegion->width;
1253    scalerCropRegion[3] = hScalerCropRegion->height;
1254    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1255
1256    int64_t  *sensorExpTime =
1257        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1258    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1259
1260    int64_t  *sensorFrameDuration =
1261        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1262    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
1263
1264    int32_t  *sensorSensitivity =
1265        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1266    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1267
1268    uint8_t  *shadingMode =
1269        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1270    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1271
1272    uint8_t  *faceDetectMode =
1273        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1274    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
1275
1276    uint8_t  *histogramMode =
1277        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1278    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1279
1280    uint8_t  *sharpnessMapMode =
1281        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1282    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1283            sharpnessMapMode, 1);
1284
1285    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1286    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1287        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1288    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1289            (int32_t*)sharpnessMap->sharpness,
1290            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1291
1292    resultMetadata = camMetadata.release();
1293    return resultMetadata;
1294}
1295
1296/*===========================================================================
1297 * FUNCTION   : convertToRegions
1298 *
1299 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1300 *
1301 * PARAMETERS :
1302 *   @rect   : cam_rect_t struct to convert
1303 *   @region : int32_t destination array
1304 *   @weight : if we are converting from cam_area_t, weight is valid
1305 *             else weight = -1
1306 *
1307 *==========================================================================*/
1308void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1309    region[0] = rect.left;
1310    region[1] = rect.top;
1311    region[2] = rect.left + rect.width;
1312    region[3] = rect.top + rect.height;
1313    if (weight > -1) {
1314        region[4] = weight;
1315    }
1316}
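/* Worked example: a cam_rect_t of {left = 100, top = 50, width = 200, height = 120}
 * with weight 1 converts to the region array {100, 50, 300, 170, 1}, i.e.
 * (xmin, ymin, xmax, ymax, weight) as the framework metadata expects. */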
1317
1318/*===========================================================================
1319 * FUNCTION   : convertFromRegions
1320 *
1321 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
1322 *
1323 * PARAMETERS :
1324 *   @roi      : destination cam_area_t struct
1325 *   @settings : frame settings metadata containing the region entry
1326 *   @tag      : metadata tag of the region to read
1327 *
1328 *
1329 *==========================================================================*/
1330void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1331                                                   const camera_metadata_t *settings,
1332                                                   uint32_t tag){
1333    CameraMetadata frame_settings;
1334    frame_settings = settings;
1335    int32_t x_min = frame_settings.find(tag).data.i32[0];
1336    int32_t y_min = frame_settings.find(tag).data.i32[1];
1337    int32_t x_max = frame_settings.find(tag).data.i32[2];
1338    int32_t y_max = frame_settings.find(tag).data.i32[3];
1339    roi->weight = frame_settings.find(tag).data.i32[4];
1340    roi->rect.left = x_min;
1341    roi->rect.top = y_min;
1342    roi->rect.width = x_max - x_min;
1343    roi->rect.height = y_max - y_min;
1344}
1345
1346/*===========================================================================
1347 * FUNCTION   : resetIfNeededROI
1348 *
1349 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1350 *              crop region
1351 *
1352 * PARAMETERS :
1353 *   @roi       : cam_area_t struct to resize
1354 *   @scalerCropRegion : cam_crop_region_t region to compare against
1355 *
1356 *
1357 *==========================================================================*/
1358bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1359                                                 const cam_crop_region_t* scalerCropRegion)
1360{
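    /* Compute the right/bottom edges of both rectangles; reject the roi if it
     * does not overlap the crop region at all, otherwise clamp it so that it
     * fits entirely inside the crop region. */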
1361    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1362    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1363    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
1364    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
1365    if ((roi_x_max < scalerCropRegion->left) ||
1366        (roi_y_max < scalerCropRegion->top)  ||
1367        (roi->rect.left > crop_x_max) ||
1368        (roi->rect.top > crop_y_max)){
1369        return false;
1370    }
1371    if (roi->rect.left < scalerCropRegion->left) {
1372        roi->rect.left = scalerCropRegion->left;
1373    }
1374    if (roi->rect.top < scalerCropRegion->top) {
1375        roi->rect.top = scalerCropRegion->top;
1376    }
1377    if (roi_x_max > crop_x_max) {
1378        roi_x_max = crop_x_max;
1379    }
1380    if (roi_y_max > crop_y_max) {
1381        roi_y_max = crop_y_max;
1382    }
1383    roi->rect.width = roi_x_max - roi->rect.left;
1384    roi->rect.height = roi_y_max - roi->rect.top;
1385    return true;
1386}
1387
1388/*===========================================================================
1389 * FUNCTION   : convertLandmarks
1390 *
1391 * DESCRIPTION: helper method to extract the landmarks from face detection info
1392 *
1393 * PARAMETERS :
1394 *   @face      : cam_face_detection_info_t for the detected face
1395 *   @landmarks : int32_t destination array
1396 *
1397 *
1398 *==========================================================================*/
1399void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1400{
1401    landmarks[0] = face.left_eye_center.x;
1402    landmarks[1] = face.left_eye_center.y;
1403    landmarks[2] = face.right_eye_center.x;
1404    landmarks[3] = face.right_eye_center.y;
1405    landmarks[4] = face.mouth_center.x;
1406    landmarks[5] = face.mouth_center.y;
1407}
1408
1409#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1410/*===========================================================================
1411 * FUNCTION   : initCapabilities
1412 *
1413 * DESCRIPTION: initialize camera capabilities in static data struct
1414 *
1415 * PARAMETERS :
1416 *   @cameraId  : camera Id
1417 *
1418 * RETURN     : int32_t type of status
1419 *              NO_ERROR  -- success
1420 *              non-zero failure code
1421 *==========================================================================*/
1422int QCamera3HardwareInterface::initCapabilities(int cameraId)
1423{
1424    int rc = 0;
1425    mm_camera_vtbl_t *cameraHandle = NULL;
1426    QCamera3HeapMemory *capabilityHeap = NULL;
1427
1428    cameraHandle = camera_open(cameraId);
1429    if (!cameraHandle) {
1430        ALOGE("%s: camera_open failed", __func__);
1431        rc = -1;
1432        goto open_failed;
1433    }
1434
1435    capabilityHeap = new QCamera3HeapMemory();
1436    if (capabilityHeap == NULL) {
1437        ALOGE("%s: creation of capabilityHeap failed", __func__);
1438        goto heap_creation_failed;
1439    }
1440    /* Allocate memory for capability buffer */
1441    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1442    if(rc != OK) {
1443        ALOGE("%s: No memory for cappability", __func__);
1444        goto allocate_failed;
1445    }
1446
1447    /* Map memory for capability buffer */
1448    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1449    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1450                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1451                                capabilityHeap->getFd(0),
1452                                sizeof(cam_capability_t));
1453    if(rc < 0) {
1454        ALOGE("%s: failed to map capability buffer", __func__);
1455        goto map_failed;
1456    }
1457
1458    /* Query Capability */
1459    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1460    if(rc < 0) {
1461        ALOGE("%s: failed to query capability",__func__);
1462        goto query_failed;
1463    }
1464    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1465    if (!gCamCapability[cameraId]) {
1466        ALOGE("%s: out of memory", __func__);
1467        goto query_failed;
1468    }
1469    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1470                                        sizeof(cam_capability_t));
1471    rc = 0;
1472
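    /* Cleanup ladder: the success path falls through every label below,
     * releasing resources in the reverse order they were acquired. */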
1473query_failed:
1474    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1475                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1476map_failed:
1477    capabilityHeap->deallocate();
1478allocate_failed:
1479    delete capabilityHeap;
1480heap_creation_failed:
1481    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1482    cameraHandle = NULL;
1483open_failed:
1484    return rc;
1485}
1486
1487/*===========================================================================
1488 * FUNCTION   : initParameters
1489 *
1490 * DESCRIPTION: initialize camera parameters
1491 *
1492 * PARAMETERS :
1493 *
1494 * RETURN     : int32_t type of status
1495 *              NO_ERROR  -- success
1496 *              non-zero failure code
1497 *==========================================================================*/
1498int QCamera3HardwareInterface::initParameters()
1499{
1500    int rc = 0;
1501
1502    //Allocate Set Param Buffer
1503    mParamHeap = new QCamera3HeapMemory();
1504    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1505    if(rc != OK) {
1506        rc = NO_MEMORY;
1507        ALOGE("Failed to allocate SETPARM Heap memory");
1508        delete mParamHeap;
1509        mParamHeap = NULL;
1510        return rc;
1511    }
1512
1513    //Map memory for parameters buffer
1514    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1515            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1516            mParamHeap->getFd(0),
1517            sizeof(parm_buffer_t));
1518    if(rc < 0) {
1519        ALOGE("%s:failed to map SETPARM buffer",__func__);
1520        rc = FAILED_TRANSACTION;
1521        mParamHeap->deallocate();
1522        delete mParamHeap;
1523        mParamHeap = NULL;
1524        return rc;
1525    }
1526
1527    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1528    return rc;
1529}
1530
1531/*===========================================================================
1532 * FUNCTION   : deinitParameters
1533 *
1534 * DESCRIPTION: de-initialize camera parameters
1535 *
1536 * PARAMETERS :
1537 *
1538 * RETURN     : NONE
1539 *==========================================================================*/
1540void QCamera3HardwareInterface::deinitParameters()
1541{
1542    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1543            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1544
1545    mParamHeap->deallocate();
1546    delete mParamHeap;
1547    mParamHeap = NULL;
1548
1549    mParameters = NULL;
1550}
1551
1552/*===========================================================================
1553 * FUNCTION   : calcMaxJpegSize
1554 *
1555 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1556 *
1557 * PARAMETERS :
1558 *
1559 * RETURN     : max_jpeg_size
1560 *==========================================================================*/
1561int QCamera3HardwareInterface::calcMaxJpegSize()
1562{
1563    int32_t max_jpeg_size = 0;
1564    int temp_width, temp_height;
1565    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1566        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1567        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1568        if (temp_width * temp_height > max_jpeg_size ) {
1569            max_jpeg_size = temp_width * temp_height;
1570        }
1571    }
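    /* Bound the JPEG size by the uncompressed YUV 4:2:0 size (w * h * 3/2) of
     * the largest picture size, plus room for the camera3_jpeg_blob_t trailer. */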
1572    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1573    return max_jpeg_size;
1574}
1575
1576/*===========================================================================
1577 * FUNCTION   : initStaticMetadata
1578 *
1579 * DESCRIPTION: initialize the static metadata
1580 *
1581 * PARAMETERS :
1582 *   @cameraId  : camera Id
1583 *
1584 * RETURN     : int32_t type of status
1585 *              0  -- success
1586 *              non-zero failure code
1587 *==========================================================================*/
1588int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1589{
1590    int rc = 0;
1591    CameraMetadata staticInfo;
1592    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1593    /*HAL 3 only*/
1594    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1595                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1596
1597    /*hard coded for now but this should come from sensor*/
1598    float min_focus_distance;
1599    if(facingBack){
1600        min_focus_distance = 10;
1601    } else {
1602        min_focus_distance = 0;
1603    }
1604    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1605                    &min_focus_distance, 1);
1606
1607    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1608                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1609
1610    /*should be using focal lengths but sensor doesn't provide that info now*/
1611    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1612                      &gCamCapability[cameraId]->focal_length,
1613                      1);
1614
1615    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1616                      gCamCapability[cameraId]->apertures,
1617                      gCamCapability[cameraId]->apertures_count);
1618
1619    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1620                gCamCapability[cameraId]->filter_densities,
1621                gCamCapability[cameraId]->filter_densities_count);
1622
1623
1624    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1625                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1626                      gCamCapability[cameraId]->optical_stab_modes_count);
1627
1628    staticInfo.update(ANDROID_LENS_POSITION,
1629                      gCamCapability[cameraId]->lens_position,
1630                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1631
1632    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1633                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1634    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1635                      lens_shading_map_size,
1636                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1637
1638    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map,
1639            sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float));
1640
1641    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1642                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1643    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1644            geo_correction_map_size,
1645            sizeof(geo_correction_map_size)/sizeof(int32_t));
1646
1647    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1648                       gCamCapability[cameraId]->geo_correction_map,
1649                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1650
1651    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1652            gCamCapability[cameraId]->sensor_physical_size, 2);
1653
1654    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1655            gCamCapability[cameraId]->exposure_time_range, 2);
1656
1657    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1658            &gCamCapability[cameraId]->max_frame_duration, 1);
1659
1660
1661    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1662                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1663
1664    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1665                                               gCamCapability[cameraId]->pixel_array_size.height};
1666    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1667                      pixel_array_size, 2);
1668
1669    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.width,
1670                                                gCamCapability[cameraId]->active_array_size.height};
1671
1672    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1673                      active_array_size, 2);
1674
1675    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1676            &gCamCapability[cameraId]->white_level, 1);
1677
1678    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1679            gCamCapability[cameraId]->black_level_pattern, 4);
1680
1681    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1682                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1683
1684    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1685                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1686
1687    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1688                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1689    /*hardcode 0 for now*/
1690    int32_t max_face_count = 0;
1691    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1692                      &max_face_count, 1);
1693
1694    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1695                      &gCamCapability[cameraId]->histogram_size, 1);
1696
1697    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1698            &gCamCapability[cameraId]->max_histogram_count, 1);
1699
1700    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1701                                                gCamCapability[cameraId]->sharpness_map_size.height};
1702
1703    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1704            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1705
1706    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1707            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1708
1709
1710    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1711                      &gCamCapability[cameraId]->raw_min_duration,
1712                       1);
1713
1714    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888};
1715    int scalar_formats_count = 1;
1716    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1717                      scalar_formats,
1718                      scalar_formats_count);
1719
1720    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1721    makeTable(gCamCapability[cameraId]->supported_sizes_tbl,
1722              gCamCapability[cameraId]->supported_sizes_tbl_cnt,
1723              available_processed_sizes);
1724    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1725                available_processed_sizes,
1726                (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2);
1727
1728    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1729    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1730                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1731                 available_fps_ranges);
1732    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1733            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1734
1735    camera_metadata_rational exposureCompensationStep = {
1736            gCamCapability[cameraId]->exp_compensation_step.numerator,
1737            gCamCapability[cameraId]->exp_compensation_step.denominator};
1738    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1739                      &exposureCompensationStep, 1);
1740
1741    /*TO DO*/
1742    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1743    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1744                      availableVstabModes, sizeof(availableVstabModes));
1745
1746    /*HAL 1 and HAL 3 common*/
1747    float maxZoom = 10;
1748    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1749            &maxZoom, 1);
1750
1751    int32_t max3aRegions = 1;
1752    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1753            &max3aRegions, 1);
1754
1755    uint8_t availableFaceDetectModes[] = {
1756            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1757    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1758                      availableFaceDetectModes,
1759                      sizeof(availableFaceDetectModes));
1760
1761    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1762                                       gCamCapability[cameraId]->raw_dim.height};
1763    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1764                      raw_size,
1765                      sizeof(raw_size)/sizeof(int32_t));
1766
1767    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1768                                                        gCamCapability[cameraId]->exposure_compensation_max};
1769    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1770            exposureCompensationRange,
1771            sizeof(exposureCompensationRange)/sizeof(int32_t));
1772
1773    uint8_t lensFacing = (facingBack) ?
1774            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1775    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1776
1777    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
1778    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1779              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1780              available_jpeg_sizes);
1781    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1782                available_jpeg_sizes,
1783                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1784
1785    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1786                      available_thumbnail_sizes,
1787                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1788
1789    int32_t max_jpeg_size = 0;
1790    int temp_width, temp_height;
1791    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1792        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1793        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1794        if (temp_width * temp_height > max_jpeg_size ) {
1795            max_jpeg_size = temp_width * temp_height;
1796        }
1797    }
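    /* Same worst-case estimate as calcMaxJpegSize(): uncompressed YUV 4:2:0
     * size of the largest picture plus the camera3_jpeg_blob_t trailer. */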
1798    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1799    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1800                      &max_jpeg_size, 1);
1801
1802    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1803    int32_t size = 0;
1804    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1805        int val = lookupFwkName(EFFECT_MODES_MAP,
1806                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1807                                   gCamCapability[cameraId]->supported_effects[i]);
1808        if (val != NAME_NOT_FOUND) {
1809            avail_effects[size] = (uint8_t)val;
1810            size++;
1811        }
1812    }
1813    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1814                      avail_effects,
1815                      size);
1816
1817    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1818    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1819    int32_t supported_scene_modes_cnt = 0;
1820    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1821        int val = lookupFwkName(SCENE_MODES_MAP,
1822                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1823                                gCamCapability[cameraId]->supported_scene_modes[i]);
1824        if (val != NAME_NOT_FOUND) {
1825            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1826            supported_indexes[supported_scene_modes_cnt] = i;
1827            supported_scene_modes_cnt++;
1828        }
1829    }
1830
1831    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1832                      avail_scene_modes,
1833                      supported_scene_modes_cnt);
1834
1835    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1836    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1837                      supported_scene_modes_cnt,
1838                      scene_mode_overrides,
1839                      supported_indexes);
1840    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
1841                      scene_mode_overrides,
1842                      supported_scene_modes_cnt*3);
1843
1844    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
1845    size = 0;
1846    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
1847        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
1848                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
1849                                 gCamCapability[cameraId]->supported_antibandings[i]);
1850        if (val != NAME_NOT_FOUND) {
1851            avail_antibanding_modes[size] = (uint8_t)val;
1852            size++;
1853        }
1854
1855    }
1856    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1857                      avail_antibanding_modes,
1858                      size);
1859
1860    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
1861    size = 0;
1862    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
1863        int val = lookupFwkName(FOCUS_MODES_MAP,
1864                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
1865                                gCamCapability[cameraId]->supported_focus_modes[i]);
1866        if (val != NAME_NOT_FOUND) {
1867            avail_af_modes[size] = (uint8_t)val;
1868            size++;
1869        }
1870    }
1871    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1872                      avail_af_modes,
1873                      size);
1874
1875    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
1876    size = 0;
1877    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
1878        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1879                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1880                                    gCamCapability[cameraId]->supported_white_balances[i]);
1881        if (val != NAME_NOT_FOUND) {
1882            avail_awb_modes[size] = (uint8_t)val;
1883            size++;
1884        }
1885    }
1886    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1887                      avail_awb_modes,
1888                      size);
1889
1890    uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX];
1891    size = 0;
1892    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) {
1893        int val = lookupFwkName(FLASH_MODES_MAP,
1894                                sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
1895                                gCamCapability[cameraId]->supported_flash_modes[i]);
1896        if (val != NAME_NOT_FOUND) {
1897            avail_flash_modes[size] = (uint8_t)val;
1898            size++;
1899        }
1900    }
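    /* Treat more than one supported flash mode as an indication that a
     * physical flash unit is present. */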
1901    uint8_t flashAvailable = 0;
1902    if (size > 1) {
1903        //flash is supported
1904        flashAvailable = 1;
1905    }
1906    staticInfo.update(ANDROID_FLASH_MODE,
1907                      avail_flash_modes,
1908                      size);
1909
1910    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
1911            &flashAvailable, 1);
1912
1913    uint8_t avail_ae_modes[5];
1914    size = 0;
1915    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
1916        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
1917        size++;
1918    }
1919    if (flashAvailable) {
1920        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
1921        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
1922        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
1923    }
1924    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1925                      avail_ae_modes,
1926                      size);
1927
1928    gStaticMetadata[cameraId] = staticInfo.release();
1929    return rc;
1930}
1931
1932/*===========================================================================
1933 * FUNCTION   : makeTable
1934 *
1935 * DESCRIPTION: make a table of sizes
1936 *
1937 * PARAMETERS :
1938 *
1939 *
1940 *==========================================================================*/
1941void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
1942                                          int32_t* sizeTable)
1943{
1944    int j = 0;
1945    for (int i = 0; i < size; i++) {
1946        sizeTable[j] = dimTable[i].width;
1947        sizeTable[j+1] = dimTable[i].height;
1948        j+=2;
1949    }
1950}
1951
1952/*===========================================================================
1953 * FUNCTION   : makeFPSTable
1954 *
1955 * DESCRIPTION: make a table of fps ranges
1956 *
1957 * PARAMETERS :
1958 *
1959 *==========================================================================*/
1960void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
1961                                          int32_t* fpsRangesTable)
1962{
1963    int j = 0;
1964    for (int i = 0; i < size; i++) {
1965        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
1966        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
1967        j+=2;
1968    }
1969}
1970
1971/*===========================================================================
1972 * FUNCTION   : makeOverridesList
1973 *
1974 * DESCRIPTION: make a list of scene mode overrides
1975 *
1976 * PARAMETERS :
1977 *
1978 *
1979 *==========================================================================*/
1980void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
1981                                                  uint8_t size, uint8_t* overridesList,
1982                                                  uint8_t* supported_indexes)
1983{
1984    /*daemon will give a list of overrides for all scene modes.
1985      However we should send the fwk only the overrides for the scene modes
1986      supported by the framework*/
1987    int j = 0, index = 0;
1988    for (int i = 0; i < size; i++) {
1989        index = supported_indexes[i];
1990        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
1991        overridesList[j+1] = (uint8_t)overridesTable[index].awb_mode;
1992        overridesList[j+2] = (uint8_t)overridesTable[index].af_mode;
1993        j+=3;
1994    }
1995}
1996
1997/*===========================================================================
1998 * FUNCTION   : getScalarFormat
1999 *
2000 * DESCRIPTION: convert the format to type recognized by framework
2001 *
2002 * PARAMETERS : format : the format from backend
2003 *
2004 * RETURN     : format recognized by framework
2005 *
2006 *==========================================================================*/
2007int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2008{
2009    int32_t halPixelFormat;
2010
2011    switch (format) {
2012    case CAM_FORMAT_YUV_420_NV12:
2013        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2014        break;
2015    case CAM_FORMAT_YUV_420_NV21:
2016        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2017        break;
2018    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2019        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2020        break;
2021    case CAM_FORMAT_YUV_420_YV12:
2022        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2023        break;
2024    case CAM_FORMAT_YUV_422_NV16:
2025    case CAM_FORMAT_YUV_422_NV61:
2026    default:
2027        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2028        break;
2029    }
2030    return halPixelFormat;
2031}
2032
2033/*===========================================================================
2034 * FUNCTION   : AddSetParmEntryToBatch
2035 *
2036 * DESCRIPTION: add set parameter entry into batch
2037 *
2038 * PARAMETERS :
2039 *   @p_table     : ptr to parameter buffer
2040 *   @paramType   : parameter type
2041 *   @paramLength : length of parameter value
2042 *   @paramValue  : ptr to parameter value
2043 *
2044 * RETURN     : int32_t type of status
2045 *              NO_ERROR  -- success
2046 *              non-zero failure code
2047 *==========================================================================*/
2048int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2049                                                          cam_intf_parm_type_t paramType,
2050                                                          uint32_t paramLength,
2051                                                          void *paramValue)
2052{
2053    int position = paramType;
2054    int current, next;
2055
2056    /*************************************************************************
2057    *                 Code to take care of linking next flags                *
2058    *************************************************************************/
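    /* p_table keeps the flagged entries in a singly linked list sorted by
     * parameter ID, with first_flagged_entry as the head; walk the list and
     * splice 'position' in unless it is already linked. */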
2059    current = GET_FIRST_PARAM_ID(p_table);
2060    if (position == current){
2061        //DO NOTHING
2062    } else if (position < current){
2063        SET_NEXT_PARAM_ID(position, p_table, current);
2064        SET_FIRST_PARAM_ID(p_table, position);
2065    } else {
2066        /* Search for the position in the linked list where we need to slot in*/
2067        while (position > GET_NEXT_PARAM_ID(current, p_table))
2068            current = GET_NEXT_PARAM_ID(current, p_table);
2069
2070        /*If node already exists no need to alter linking*/
2071        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2072            next = GET_NEXT_PARAM_ID(current, p_table);
2073            SET_NEXT_PARAM_ID(current, p_table, position);
2074            SET_NEXT_PARAM_ID(position, p_table, next);
2075        }
2076    }
2077
2078    /*************************************************************************
2079    *                   Copy contents into entry                             *
2080    *************************************************************************/
2081
2082    if (paramLength > sizeof(parm_type_t)) {
2083        ALOGE("%s:Size of input larger than max entry size",__func__);
2084        return BAD_VALUE;
2085    }
2086    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2087    return NO_ERROR;
2088}
2089
2090/*===========================================================================
2091 * FUNCTION   : lookupFwkName
2092 *
2093 * DESCRIPTION: In case the enum is not the same in fwk and backend,
2094 *              make sure the parameter is correctly propagated
2095 *
2096 * PARAMETERS  :
2097 *   @arr      : map between the two enums
2098 *   @len      : len of the map
2099 *   @hal_name : name of the hal_parm to map
2100 *
2101 * RETURN     : int type of status
2102 *              fwk_name  -- success
2103 *              NAME_NOT_FOUND -- failure
2104 *==========================================================================*/
2105int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2106                                             int len, int hal_name)
2107{
2108
2109    for (int i = 0; i < len; i++) {
2110        if (arr[i].hal_name == hal_name)
2111            return arr[i].fwk_name;
2112    }
2113
2114    /* Not able to find matching framework type is not necessarily
2115     * an error case. This happens when mm-camera supports more attributes
2116     * than the frameworks do */
2117    ALOGD("%s: Cannot find matching framework type", __func__);
2118    return NAME_NOT_FOUND;
2119}
2120
2121/*===========================================================================
2122 * FUNCTION   : lookupHalName
2123 *
2124 * DESCRIPTION: In case the enum is not the same in fwk and backend,
2125 *              make sure the parameter is correctly propagated
2126 *
2127 * PARAMETERS  :
2128 *   @arr      : map between the two enums
2129 *   @len      : len of the map
2130 *   @fwk_name : name of the framework parameter to map
2131 *
2132 * RETURN     : int32_t type of status
2133 *              hal_name  -- success
2134 *              NAME_NOT_FOUND -- failure
2135 *==========================================================================*/
2136int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2137                                             int len, int fwk_name)
2138{
2139    for (int i = 0; i < len; i++) {
2140       if (arr[i].fwk_name == fwk_name)
2141           return arr[i].hal_name;
2142    }
2143    ALOGE("%s: Cannot find matching hal type", __func__);
2144    return NAME_NOT_FOUND;
2145}
2146
2147/*===========================================================================
2148 * FUNCTION   : getCamInfo
2149 *
2150 * DESCRIPTION: query camera information and static capabilities
2151 *
2152 * PARAMETERS :
2153 *   @cameraId  : camera Id
2154 *   @info      : camera info struct to be filled in with camera capabilities
2155 *
2156 * RETURN     : int32_t type of status
2157 *              NO_ERROR  -- success
2158 *              non-zero failure code
2159 *==========================================================================*/
2160int QCamera3HardwareInterface::getCamInfo(int cameraId,
2161                                    struct camera_info *info)
2162{
2163    int rc = 0;
2164
2165    if (NULL == gCamCapability[cameraId]) {
2166        rc = initCapabilities(cameraId);
2167        if (rc < 0) {
2168            //pthread_mutex_unlock(&g_camlock);
2169            return rc;
2170        }
2171    }
2172
2173    if (NULL == gStaticMetadata[cameraId]) {
2174        rc = initStaticMetadata(cameraId);
2175        if (rc < 0) {
2176            return rc;
2177        }
2178    }
2179
2180    switch(gCamCapability[cameraId]->position) {
2181    case CAM_POSITION_BACK:
2182        info->facing = CAMERA_FACING_BACK;
2183        break;
2184
2185    case CAM_POSITION_FRONT:
2186        info->facing = CAMERA_FACING_FRONT;
2187        break;
2188
2189    default:
2190        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2191        rc = -1;
2192        break;
2193    }
2194
2195
2196    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2197    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2198    info->static_camera_characteristics = gStaticMetadata[cameraId];
2199
2200    return rc;
2201}
2202
2203/*===========================================================================
2204 * FUNCTION   : translateCapabilityToMetadata
2205 *
2206 * DESCRIPTION: construct default request settings for a template type
2207 *
2208 * PARAMETERS : @type : the request template type
2209 *
2210 *
2211 * RETURN     : success: camera_metadata_t*
2212 *              failure: NULL
2213 *
2214 *==========================================================================*/
2215camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2216{
2217    pthread_mutex_lock(&mMutex);
2218
2219    if (mDefaultMetadata[type] != NULL) {
2220        pthread_mutex_unlock(&mMutex);
2221        return mDefaultMetadata[type];
2222    }
2223    //first time we are handling this request
2224    //fill up the metadata structure using the wrapper class
2225    CameraMetadata settings;
2226    //translate from cam_capability_t to camera_metadata_tag_t
2227    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2228    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2229
2230    /*control*/
2231
2232    uint8_t controlIntent = 0;
2233    switch (type) {
2234      case CAMERA3_TEMPLATE_PREVIEW:
2235        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2236        break;
2237      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2238        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2239        break;
2240      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2241        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2242        break;
2243      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2244        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2245        break;
2246      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2247        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2248        break;
2249      default:
2250        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2251        break;
2252    }
2253    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2254
2255    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2256            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2257
2258    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2259    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2260
2261    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2262    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2263
2264    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2265    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2266
2267    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2268    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2269
2270    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2271    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2272
2273    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2274    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2275
2276    static uint8_t focusMode;
2277    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2278        ALOGV("%s: Setting focus mode to auto", __func__);
2279        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2280    } else {
2281        ALOGV("%s: Setting focus mode to off", __func__);
2282        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2283    }
2284    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2285
2286    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2287    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2288
2289    /*flash*/
2290    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2291    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2292
2293
2294    /* lens */
2295    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2296    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2297
2298    if (gCamCapability[mCameraId]->filter_densities_count) {
2299        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2300        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2301                        1);
2302    }
2303
2304    /* TODO: Enable focus lengths once supported*/
2305    /*if (gCamCapability[mCameraId]->focal_lengths_count) {
2306        float default_focal_length = gCamCapability[mCameraId]->focal_lengths[0];
2307        settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2308    }*/
2309
2310    mDefaultMetadata[type] = settings.release();
2311
2312    pthread_mutex_unlock(&mMutex);
2313    return mDefaultMetadata[type];
2314}
2315
2316/*===========================================================================
2317 * FUNCTION   : setFrameParameters
2318 *
2319 * DESCRIPTION: set parameters per frame as requested in the metadata from
2320 *              framework
2321 *
2322 * PARAMETERS :
2323 *   @settings  : frame settings information from framework
2324 *
2325 *
2326 * RETURN     : success: NO_ERROR
2327 *              failure:
2328 *==========================================================================*/
2329int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2330                                                  const camera_metadata_t *settings)
2331{
2332    /*translate from camera_metadata_t type to parm_type_t*/
2333    int rc = 0;
2334    if (settings == NULL && mFirstRequest) {
2335        /*settings cannot be null for the first request*/
2336        return BAD_VALUE;
2337    }
2338
2339    int32_t hal_version = CAM_HAL_V3;
2340
2341    memset(mParameters, 0, sizeof(parm_buffer_t));
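    /* An empty parameter list is marked by CAM_INTF_PARM_MAX; every entry
     * added via AddSetParmEntryToBatch() links itself in from this head. */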
2342    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2343    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2344                sizeof(hal_version), &hal_version);
2345
2346    /*we need to update the frame number in the parameters*/
2347    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2348                                sizeof(frame_id), &frame_id);
2349    if (rc < 0) {
2350        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2351        return BAD_VALUE;
2352    }
2353
2354    if(settings != NULL){
2355        rc = translateMetadataToParameters(settings);
2356    }
2357    /*set the parameters to backend*/
2358    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2359    return rc;
2360}
2361
2362/*===========================================================================
2363 * FUNCTION   : translateMetadataToParameters
2364 *
2365 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2366 *
2367 *
2368 * PARAMETERS :
2369 *   @settings  : frame settings information from framework
2370 *
2371 *
2372 * RETURN     : success: NO_ERROR
2373 *              failure:
2374 *==========================================================================*/
2375int QCamera3HardwareInterface::translateMetadataToParameters
2376                                  (const camera_metadata_t *settings)
2377{
2378    int rc = 0;
2379    CameraMetadata frame_settings;
2380    frame_settings = settings;
2381
2382
2383    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2384        int32_t antibandingMode =
2385            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2386        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2387                sizeof(antibandingMode), &antibandingMode);
2388    }
2389
2390    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2391        int32_t expCompensation = frame_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2392        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2393          sizeof(expCompensation), &expCompensation);
2394    }
2395
2396    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2397        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2398        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2399                sizeof(aeLock), &aeLock);
2400    }
2401
2402    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2403        cam_fps_range_t fps_range;
2404        fps_range.min_fps =
2405            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2406        fps_range.max_fps =
2407            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2408        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2409                sizeof(fps_range), &fps_range);
2410    }
2411
2412    float focalDistance = -1.0;
2413    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2414        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2415        rc = AddSetParmEntryToBatch(mParameters,
2416                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2417                sizeof(focalDistance), &focalDistance);
2418    }
2419
2420    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2421        uint8_t fwk_focusMode =
2422            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2423        uint8_t focusMode;
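        /* A requested focus distance of 0 diopters with AF disabled means
         * focus at infinity, which the backend models as a separate mode. */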
2424        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2425            focusMode = CAM_FOCUS_MODE_INFINITY;
2426        } else {
2427            focusMode = lookupHalName(FOCUS_MODES_MAP,
2428                                      sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2429                                      fwk_focusMode);
2430        }
2431        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2432                sizeof(focusMode), &focusMode);
2433    }
2434
2435    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2436        uint8_t awbLock =
2437            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2438        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2439                sizeof(awbLock), &awbLock);
2440    }
2441
2442    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2443        uint8_t fwk_whiteLevel =
2444            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2445        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2446                sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2447                fwk_whiteLevel);
2448        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2449                sizeof(whiteLevel), &whiteLevel);
2450    }
2451
2452    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2453        uint8_t fwk_effectMode =
2454            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2455        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2456                sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2457                fwk_effectMode);
2458        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2459                sizeof(effectMode), &effectMode);
2460    }
2461
2462    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2463        uint8_t fwk_aeMode =
2464            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2465        uint8_t aeMode;
2466        int32_t redeye;
2467        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2468            aeMode = CAM_AE_MODE_OFF;
2469        } else {
2470            aeMode = CAM_AE_MODE_ON;
2471        }
2472        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2473            redeye = 1;
2474        } else {
2475            redeye = 0;
2476        }
2477        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2478                                          sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
2479                                          aeMode);
2480        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2481                sizeof(aeMode), &aeMode);
2482        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2483                sizeof(flashMode), &flashMode);
2484        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2485                sizeof(redeye), &redeye);
2486    }
2487
2488    if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) {
2489        int32_t metaFrameNumber =
2490            frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0];
2491        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2492                sizeof(metaFrameNumber), &metaFrameNumber);
2493    }
2494
2495    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2496        uint8_t colorCorrectMode =
2497            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2498        rc =
2499            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2500                    sizeof(colorCorrectMode), &colorCorrectMode);
2501    }
2502    cam_trigger_t aecTrigger;
2503    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2504    aecTrigger.trigger_id = -1;
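    /* Idle trigger by default; only overridden when the framework supplies
     * both the precapture trigger and its id. */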
2505    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2506        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2507        aecTrigger.trigger =
2508            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2509        aecTrigger.trigger_id =
2510            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2511    }
2512    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2513                                sizeof(aecTrigger), &aecTrigger);
2514
2515    /*af_trigger must come with a trigger id*/
2516    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2517        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2518        cam_trigger_t af_trigger;
2519        af_trigger.trigger =
2520            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2521        af_trigger.trigger_id =
2522            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2523        rc = AddSetParmEntryToBatch(mParameters,
2524                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2525    }
2526
2527    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2528        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2529        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2530                sizeof(metaMode), &metaMode);
2531    }
2532
2533    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2534        int32_t demosaic =
2535            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2536        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2537                sizeof(demosaic), &demosaic);
2538    }
2539
2540    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2541        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2542        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE,
2543                sizeof(edgeMode), &edgeMode);
2544    }
2545
2546    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2547        int32_t edgeStrength =
2548            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2549        rc = AddSetParmEntryToBatch(mParameters,
2550                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2551    }
2552
2553    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2554        uint8_t flashMode =
2555            frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2556        rc = AddSetParmEntryToBatch(mParameters,
2557                CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode);
2558    }
2559
2560    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2561        uint8_t flashPower =
2562            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2563        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2564                sizeof(flashPower), &flashPower);
2565    }
2566
2567    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2568        int64_t flashFiringTime =
2569            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2570        rc = AddSetParmEntryToBatch(mParameters,
2571                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2572    }
2573
2574    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2575        uint8_t geometricMode =
2576            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2577        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2578                sizeof(geometricMode), &geometricMode);
2579    }
2580
2581    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2582        uint8_t geometricStrength =
2583            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2584        rc = AddSetParmEntryToBatch(mParameters,
2585                CAM_INTF_META_GEOMETRIC_STRENGTH,
2586                sizeof(geometricStrength), &geometricStrength);
2587    }
2588
2589    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2590        uint8_t hotPixelMode =
2591            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2592        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2593                sizeof(hotPixelMode), &hotPixelMode);
2594    }
2595
2596    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2597        float lensAperture =
2598            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2599        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2600                sizeof(lensAperture), &lensAperture);
2601    }
2602
2603    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2604        float filterDensity =
2605            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2606        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2607                sizeof(filterDensity), &filterDensity);
2608    }
2609
2610    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2611        float focalLength =
2612            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2613        rc = AddSetParmEntryToBatch(mParameters,
2614                CAM_INTF_META_LENS_FOCAL_LENGTH,
2615                sizeof(focalLength), &focalLength);
2616    }
2617
2618    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2619        uint8_t optStabMode =
2620            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2621        rc = AddSetParmEntryToBatch(mParameters,
2622                CAM_INTF_META_LENS_OPT_STAB_MODE,
2623                sizeof(optStabMode), &optStabMode);
2624    }
2625
2626    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2627        uint8_t noiseRedMode =
2628            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2629        rc = AddSetParmEntryToBatch(mParameters,
2630                CAM_INTF_META_NOISE_REDUCTION_MODE,
2631                sizeof(noiseRedMode), &noiseRedMode);
2632    }
2633
2634    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2635        uint8_t noiseRedStrength =
2636            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2637        rc = AddSetParmEntryToBatch(mParameters,
2638                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2639                sizeof(noiseRedStrength), &noiseRedStrength);
2640    }
2641
2642    cam_crop_region_t scalerCropRegion;
2643    bool scalerCropSet = false;
2644    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2645        scalerCropRegion.left =
2646            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2647        scalerCropRegion.top =
2648            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2649        scalerCropRegion.width =
2650            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2651        scalerCropRegion.height =
2652            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2653        rc = AddSetParmEntryToBatch(mParameters,
2654                CAM_INTF_META_SCALER_CROP_REGION,
2655                sizeof(scalerCropRegion), &scalerCropRegion);
2656        scalerCropSet = true;
2657    }
2658
2659    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2660        int64_t sensorExpTime =
2661            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2662        rc = AddSetParmEntryToBatch(mParameters,
2663                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2664                sizeof(sensorExpTime), &sensorExpTime);
2665    }
2666
2667    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2668        int64_t sensorFrameDuration =
2669            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2670        rc = AddSetParmEntryToBatch(mParameters,
2671                CAM_INTF_META_SENSOR_FRAME_DURATION,
2672                sizeof(sensorFrameDuration), &sensorFrameDuration);
2673    }
2674
2675    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2676        int32_t sensorSensitivity =
2677            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2678        rc = AddSetParmEntryToBatch(mParameters,
2679                CAM_INTF_META_SENSOR_SENSITIVITY,
2680                sizeof(sensorSensitivity), &sensorSensitivity);
2681    }
2682
2683    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
2684        uint8_t shadingMode =
2685            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
2686        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
2687                sizeof(shadingMode), &shadingMode);
2688    }
2689
2690    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
2691        uint8_t shadingStrength =
2692            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
2693        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
2694                sizeof(shadingStrength), &shadingStrength);
2695    }
2696
2697    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
2698        uint8_t facedetectMode =
2699            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
2700        rc = AddSetParmEntryToBatch(mParameters,
2701                CAM_INTF_META_STATS_FACEDETECT_MODE,
2702                sizeof(facedetectMode), &facedetectMode);
2703    }
2704
2705    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
2706        uint8_t histogramMode =
2707            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
2708        rc = AddSetParmEntryToBatch(mParameters,
2709                CAM_INTF_META_STATS_HISTOGRAM_MODE,
2710                sizeof(histogramMode), &histogramMode);
2711    }
2712
2713    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
2714        uint8_t sharpnessMapMode =
2715            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
2716        rc = AddSetParmEntryToBatch(mParameters,
2717                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
2718                sizeof(sharpnessMapMode), &sharpnessMapMode);
2719    }
2720
2721    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
2722        uint8_t tonemapMode =
2723            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
2724        rc = AddSetParmEntryToBatch(mParameters,
2725                CAM_INTF_META_TONEMAP_MODE,
2726                sizeof(tonemapMode), &tonemapMode);
2727    }
2728
2729    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2730        uint8_t captureIntent =
2731            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2732        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2733                sizeof(captureIntent), &captureIntent);
2734    }
2735
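    /* Translate the framework 3A regions into HAL ROIs. When a crop region was
     * supplied above, resetIfNeededROI() is expected to validate (and adjust if
     * necessary) the ROI against it; the ROI is only forwarded when that check
     * passes. */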
2736    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
2737        cam_area_t roi;
2738        bool reset = true;
2739        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
2740        if (scalerCropSet) {
2741            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2742        }
2743        if (reset) {
2744            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
2745                    sizeof(roi), &roi);
2746        }
2747    }
2748
2749    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
2750        cam_area_t roi;
2751        bool reset = true;
2752        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
2753        if (scalerCropSet) {
2754            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2755        }
2756        if (reset) {
2757            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
2758                    sizeof(roi), &roi);
2759        }
2760    }
2761
2762    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
2763        cam_area_t roi;
2764        bool reset = true;
2765        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
2766        if (scalerCropSet) {
2767            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2768        }
2769        if (reset) {
2770            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
2771                    sizeof(roi), &roi);
2772        }
2773    }
2774    return rc;
2775}
2776
2777/*===========================================================================
2778 * FUNCTION   : getJpegSettings
2779 *
2780 * DESCRIPTION: save the jpeg settings in the HAL
2781 *
2782 *
2783 * PARAMETERS :
2784 *   @settings  : frame settings information from framework
2785 *
2786 *
2787 * RETURN     : success: NO_ERROR
2788 *              failure: NO_MEMORY (jpeg settings allocation failed)
2789 *==========================================================================*/
2790int QCamera3HardwareInterface::getJpegSettings
2791                                  (const camera_metadata_t *settings)
2792{
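    /* Drop any JPEG settings cached from a previous request before storing
     * the ones for the current request. */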
2793    if (mJpegSettings) {
2794        free(mJpegSettings);
2795        mJpegSettings = NULL;
2796    }
2797    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
    if (mJpegSettings == NULL) {
        ALOGE("%s: Failed to allocate jpeg settings", __func__);
        return NO_MEMORY;
    }
2798    CameraMetadata jpeg_settings;
2799    jpeg_settings = settings;
2800
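    /* Fall back to sane defaults when the framework omits a JPEG tag:
     * orientation 0, quality 85 and a 0x0 (disabled) thumbnail. */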
2801    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
2802        mJpegSettings->jpeg_orientation =
2803            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
2804    } else {
2805        mJpegSettings->jpeg_orientation = 0;
2806    }
2807    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
2808        mJpegSettings->jpeg_quality =
2809            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
2810    } else {
2811        mJpegSettings->jpeg_quality = 85;
2812    }
2813    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
2814        mJpegSettings->thumbnail_size.width =
2815            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
2816        mJpegSettings->thumbnail_size.height =
2817            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
2818    } else {
2819        mJpegSettings->thumbnail_size.width = 0;
2820        mJpegSettings->thumbnail_size.height = 0;
2821    }
2822    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
2823        for (int i = 0; i < 3; i++) {
2824            mJpegSettings->gps_coordinates[i] =
2825                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
2826        }
2827    }
2828    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
2829        mJpegSettings->gps_timestamp =
2830            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
2831    }
2832
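    /* Note: only the first byte of the GPS processing method string is
     * captured here; the remainder of the tag is ignored. */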
2833    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
2834        mJpegSettings->gps_processing_method =
2835            jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[0];
2836    }
2837    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2838        mJpegSettings->sensor_sensitivity =
2839            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2840    }
2841    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2842        mJpegSettings->lens_focal_length =
2843            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2844    }
2845    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2846        mJpegSettings->exposure_compensation =
2847            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2848    }
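    /* Values derived from the camera capabilities and HAL state rather than
     * from the per-request settings. */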
2849    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
2850    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
2851    return 0;
2852}
2853
2854/*===========================================================================
2855 * FUNCTION   : captureResultCb
2856 *
2857 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
2858 *
2859 * PARAMETERS :
2860 *   @metadata : metadata information from mm-camera-interface
2861 *   @buffer   : actual gralloc buffer to be returned to frameworks. NULL if metadata.
 *   @frame_number : frame number of the request the buffer/metadata belongs to
2862 *   @userdata : userdata (pointer to the HAL instance)
2863 *
2864 * RETURN     : NONE
2865 *==========================================================================*/
2866void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
2867                camera3_stream_buffer_t *buffer,
2868                uint32_t frame_number, void *userdata)
2869{
2870    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
2871    if (hw == NULL) {
2872        ALOGE("%s: Invalid hw %p", __func__, hw);
2873        return;
2874    }
2875
2876    hw->captureResultCb(metadata, buffer, frame_number);
2877    return;
2878}
2879
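/* The static functions below are the camera3_device_ops entry points exposed
 * to the framework. Each one recovers the QCamera3HardwareInterface instance
 * from device->priv and forwards the call to the matching member function. */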
2880/*===========================================================================
2881 * FUNCTION   : initialize
2882 *
2883 * DESCRIPTION: Pass framework callback pointers to HAL
2884 *
2885 * PARAMETERS :
2886 *   @device       : camera3 device structure
2887 *   @callback_ops : callback function pointers supplied by the framework
2888 * RETURN     : Success : 0
2889 *              Failure: -ENODEV
2890 *==========================================================================*/
2891
2892int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
2893                                  const camera3_callback_ops_t *callback_ops)
2894{
2895    ALOGV("%s: E", __func__);
2896    QCamera3HardwareInterface *hw =
2897        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2898    if (!hw) {
2899        ALOGE("%s: NULL camera device", __func__);
2900        return -ENODEV;
2901    }
2902
2903    int rc = hw->initialize(callback_ops);
2904    ALOGV("%s: X", __func__);
2905    return rc;
2906}
2907
2908/*===========================================================================
2909 * FUNCTION   : configure_streams
2910 *
2911 * DESCRIPTION: Static entry point to configure the requested streams
2912 *
2913 * PARAMETERS :
2914 *   @device      : camera3 device structure
2915 *   @stream_list : stream configuration requested by the framework
2916 * RETURN     : Success: 0
2917 *              Failure: -EINVAL (if stream configuration is invalid)
2918 *                       -ENODEV (fatal error)
2919 *==========================================================================*/
2920
2921int QCamera3HardwareInterface::configure_streams(
2922        const struct camera3_device *device,
2923        camera3_stream_configuration_t *stream_list)
2924{
2925    ALOGV("%s: E", __func__);
2926    QCamera3HardwareInterface *hw =
2927        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2928    if (!hw) {
2929        ALOGE("%s: NULL camera device", __func__);
2930        return -ENODEV;
2931    }
2932    int rc = hw->configureStreams(stream_list);
2933    ALOGV("%s: X", __func__);
2934    return rc;
2935}
2936
2937/*===========================================================================
2938 * FUNCTION   : register_stream_buffers
2939 *
2940 * DESCRIPTION: Register stream buffers with the device
2941 *
2942 * PARAMETERS :
2943 *   @device     : camera3 device structure
 *   @buffer_set : buffers to be registered for a configured stream
 *
2944 * RETURN     : Success: 0
 *              Failure: -ENODEV (NULL device) or error from registerStreamBuffers
2945 *==========================================================================*/
2946int QCamera3HardwareInterface::register_stream_buffers(
2947        const struct camera3_device *device,
2948        const camera3_stream_buffer_set_t *buffer_set)
2949{
2950    ALOGV("%s: E", __func__);
2951    QCamera3HardwareInterface *hw =
2952        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2953    if (!hw) {
2954        ALOGE("%s: NULL camera device", __func__);
2955        return -ENODEV;
2956    }
2957    int rc = hw->registerStreamBuffers(buffer_set);
2958    ALOGV("%s: X", __func__);
2959    return rc;
2960}
2961
2962/*===========================================================================
2963 * FUNCTION   : construct_default_request_settings
2964 *
2965 * DESCRIPTION: Configure a settings buffer to meet the required use case
2966 *
2967 * PARAMETERS :
2968 *   @device : camera3 device structure
2969 *   @type   : capture template type (preview, still capture, video, etc.)
2970 * RETURN     : Success: Return valid metadata
2971 *              Failure: Return NULL
2972 *==========================================================================*/
2973const camera_metadata_t* QCamera3HardwareInterface::
2974    construct_default_request_settings(const struct camera3_device *device,
2975                                        int type)
2976{
2977
2978    ALOGV("%s: E", __func__);
2979    camera_metadata_t* fwk_metadata = NULL;
2980    QCamera3HardwareInterface *hw =
2981        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2982    if (!hw) {
2983        ALOGE("%s: NULL camera device", __func__);
2984        return NULL;
2985    }
2986
2987    fwk_metadata = hw->translateCapabilityToMetadata(type);
2988
2989    ALOGV("%s: X", __func__);
2990    return fwk_metadata;
2991}
2992
2993/*===========================================================================
2994 * FUNCTION   : process_capture_request
2995 *
2996 * DESCRIPTION: Static entry point to queue a capture request to the HAL
2997 *
2998 * PARAMETERS :
2999 *   @device  : camera3 device structure
3000 *   @request : capture request containing settings and output buffers
3001 * RETURN     : Success: 0
 *              Failure: -EINVAL (NULL device) or error from processCaptureRequest
3002 *==========================================================================*/
3003int QCamera3HardwareInterface::process_capture_request(
3004                    const struct camera3_device *device,
3005                    camera3_capture_request_t *request)
3006{
3007    ALOGV("%s: E", __func__);
3008    QCamera3HardwareInterface *hw =
3009        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3010    if (!hw) {
3011        ALOGE("%s: NULL camera device", __func__);
3012        return -EINVAL;
3013    }
3014
3015    int rc = hw->processCaptureRequest(request);
3016    ALOGV("%s: X", __func__);
3017    return rc;
3018}
3019
3020/*===========================================================================
3021 * FUNCTION   : get_metadata_vendor_tag_ops
3022 *
3023 * DESCRIPTION: Static entry point to query vendor tag metadata operations
3024 *
3025 * PARAMETERS :
3026 *   @device : camera3 device structure
3027 *   @ops    : vendor tag query operations to be filled in by the HAL
3028 * RETURN     : NONE
3029 *==========================================================================*/
3030
3031void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3032                const struct camera3_device *device,
3033                vendor_tag_query_ops_t* ops)
3034{
3035    ALOGV("%s: E", __func__);
3036    QCamera3HardwareInterface *hw =
3037        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3038    if (!hw) {
3039        ALOGE("%s: NULL camera device", __func__);
3040        return;
3041    }
3042
3043    hw->getMetadataVendorTagOps(ops);
3044    ALOGV("%s: X", __func__);
3045    return;
3046}
3047
3048/*===========================================================================
3049 * FUNCTION   : dump
3050 *
3051 * DESCRIPTION: Static entry point to dump HAL debug state to a file descriptor
3052 *
3053 * PARAMETERS :
3054 *   @device : camera3 device structure
3055 *   @fd     : file descriptor to write the dump output to
3056 * RETURN     : NONE
3057 *==========================================================================*/
3058
3059void QCamera3HardwareInterface::dump(
3060                const struct camera3_device *device, int fd)
3061{
3062    ALOGV("%s: E", __func__);
3063    QCamera3HardwareInterface *hw =
3064        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3065    if (!hw) {
3066        ALOGE("%s: NULL camera device", __func__);
3067        return;
3068    }
3069
3070    hw->dump(fd);
3071    ALOGV("%s: X", __func__);
3072    return;
3073}
3074
3075/*===========================================================================
3076 * FUNCTION   : close_camera_device
3077 *
3078 * DESCRIPTION: Static entry point to close the camera device and release the HAL
3079 *
3080 * PARAMETERS :
3081 *   @device : hw_device_t handle of the camera device to close
3082 *
3083 * RETURN     : Success: NO_ERROR
 *              Failure: BAD_VALUE (NULL device)
3084 *==========================================================================*/
3085int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3086{
3087    ALOGV("%s: E", __func__);
3088    int ret = NO_ERROR;
3089    QCamera3HardwareInterface *hw =
3090        reinterpret_cast<QCamera3HardwareInterface *>(
3091            reinterpret_cast<camera3_device_t *>(device)->priv);
3092    if (!hw) {
3093        ALOGE("%s: NULL camera device", __func__);
3094        return BAD_VALUE;
3095    }
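    /* Deleting the HAL instance tears down the camera session; cleanup is
     * expected to happen in the destructor. */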
3096    delete hw;
3097    ALOGV("%s: X", __func__);
3098    return ret;
3099}
3100
3101}; //end namespace qcamera
3102