QCamera3HWI.cpp revision 9a351c81529b36fd8b6978ca21a27bdc56a01355
/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"

#include <cutils/properties.h>
#include <hardware/camera3.h>
#include <camera/CameraMetadata.h>
#include <stdlib.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <ui/Fence.h>
#include <gralloc_priv.h>
#include "QCamera3HWI.h"
#include "QCamera3Mem.h"
#include "QCamera3Channel.h"
#include "QCamera3PostProc.h"

using namespace android;

namespace qcamera {
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
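
/* Note: each QCameraMap table in this block pairs an Android framework enum
 * value with the corresponding mm-camera (CAM_*) enum, so settings can be
 * translated in either direction with a simple table scan. A minimal lookup
 * sketch follows; the helper name and the field names fwk_name/hal_name are
 * illustrative assumptions, not necessarily what the header declares:
 *
 *   int lookupHalName(const QCameraMap *arr, int len, int fwk_value) {
 *       for (int i = 0; i < len; i++)
 *           if (arr[i].fwk_name == fwk_value)
 *               return arr[i].hal_name;
 *       return NAME_NOT_FOUND;   // no mapping for this framework value
 *   }
 */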

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON   },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
};

const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
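
/* Note: available_thumbnail_sizes is a flat list of (width, height) pairs --
 * 512x288, 480x288, 256x154, 432x288, 320x240, 176x144 -- terminated by the
 * 0x0 entry that the framework interprets as "no thumbnail". */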

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};
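
/* Note: these are the static camera3_device_ops_t entry points the framework
 * calls through camera3_device_t::ops. Each static wrapper is expected to
 * recover the QCamera3HardwareInterface instance from camera3_device_t::priv
 * (set in the constructor below) and forward to the matching member function,
 * e.g. the static initialize(...) dispatching to the member initialize(). */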


/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL)
{
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
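    /* Note: the dereference below assumes gCamCapability[cameraId] has already
     * been populated (presumably via initCapabilities() further down in this
     * file) before the device is constructed. */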
    gCamCapability[cameraId]->version = CAM_HAL_V3;

    pthread_mutex_init(&mRequestLock, NULL);
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;

    pthread_mutex_init(&mMutex, NULL);
    pthread_mutex_init(&mCaptureResultLock, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* Clean up all channels */
    if (mMetadataChannel) {
        mMetadataChannel->stop();
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }
    /* We need to stop all streams before deleting any stream */
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        channel->stop();
    }
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        delete channel;
        free (*it);
    }

    if (mJpegSettings != NULL) {
        free(mJpegSettings);
        mJpegSettings = NULL;
    }

    deinitParameters();
    closeCamera();

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_mutex_destroy(&mRequestLock);
    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    pthread_mutex_destroy(&mCaptureResultLock);
    ALOGV("%s: X", __func__);
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    //int rc = NO_ERROR;
    int rc = 0;
    if (mCameraOpened) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    rc = openCamera();
    if (rc == 0)
        *hw_device = &mCameraDevice.common;
    else
        *hw_device = NULL;
    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }
    mCameraHandle = camera_open(mCameraId);
    if (!mCameraHandle) {
        ALOGE("camera_open failed.");
        return UNKNOWN_ERROR;
    }

    mCameraOpened = true;

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    int rc = NO_ERROR;

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;
    mCameraOpened = false;

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParameters failed %d", __func__, rc);
        goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    return 0;

err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
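
/* Note: a rough sketch of the call order the camera3 framework is expected to
 * follow against this HAL, based on the entry points implemented in this file:
 *
 *   camera module open()                       -> openCamera()
 *   device->ops->initialize(cb)                -> initialize(): parameters + metadata channel
 *   device->ops->configure_streams(...)        -> configureStreams()
 *   device->ops->register_stream_buffers(...)  -> registerStreamBuffers(), once per stream
 *   device->ops->process_capture_request(...)  -> processCaptureRequest(), once per frame
 *   device->common.close(...)                  -> closeCamera()
 */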

/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
 *              and output streams.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    pthread_mutex_lock(&mMutex);

    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    /* first invalidate all the streams in the mStreamList
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }

    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d",
                __func__, newStream->stream_type, newStream->format);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    //mMetadataChannel->stop();

    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        if (newStream->priv == NULL) {
            //New stream, construct channel
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    channel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = channel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework won't*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
    mPendingRequestsList.clear();

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mFirstRequest = true;

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateCaptureRequest
 *
 * DESCRIPTION: validate a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateCaptureRequest(
                    camera3_capture_request_t *request)
{
    ssize_t idx = 0;
    const camera3_stream_buffer_t *b;
    CameraMetadata meta;

    /* Sanity check the request */
    if (request == NULL) {
        ALOGE("%s: NULL capture request", __func__);
        return BAD_VALUE;
    }

    uint32_t frameNumber = request->frame_number;
    if (request->input_buffer != NULL &&
            request->input_buffer->stream != mInputStream) {
        ALOGE("%s: Request %d: Input buffer not from input stream!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
        ALOGE("%s: Request %d: No output buffers provided!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->input_buffer != NULL) {
        //TODO
        ALOGE("%s: Not supporting input buffer yet", __func__);
        return BAD_VALUE;
    }

    // Validate all buffers
    b = request->output_buffers;
    do {
        QCamera3Channel *channel =
                static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        idx++;
        b = request->output_buffers + idx;
    } while (idx < (ssize_t)request->num_output_buffers);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : registerStreamBuffers
 *
 * DESCRIPTION: Register buffers for a given stream with the HAL device.
 *
 * PARAMETERS :
 *   @buffer_set : set of buffers to be registered for a single stream
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t *buffer_set)
{
    int rc = 0;

    pthread_mutex_lock(&mMutex);

    if (buffer_set == NULL) {
        ALOGE("%s: Invalid buffer_set parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->stream == NULL) {
        ALOGE("%s: Invalid stream parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->num_buffers < 1) {
        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->buffers == NULL) {
        ALOGE("%s: Invalid buffers parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    camera3_stream_t *stream = buffer_set->stream;
    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;

    //set the buffer_set in the mStreamInfo array
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->stream == stream) {
            uint32_t numBuffers = buffer_set->num_buffers;
            (*it)->buffer_set.stream = buffer_set->stream;
            (*it)->buffer_set.num_buffers = numBuffers;
            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
            if ((*it)->buffer_set.buffers == NULL) {
                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
                pthread_mutex_unlock(&mMutex);
                return -ENOMEM;
            }
            for (size_t j = 0; j < numBuffers; j++){
                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
            }
            (*it)->registered = 1;
        }
    }

    if (stream->stream_type != CAMERA3_STREAM_OUTPUT) {
        ALOGE("%s: not yet support non output type stream", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
    if (rc < 0) {
        ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    pthread_mutex_unlock(&mMutex);
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : processCaptureRequest
 *
 * DESCRIPTION: process a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::processCaptureRequest(
                    camera3_capture_request_t *request)
{
    int rc = NO_ERROR;
    int32_t request_id;
    CameraMetadata meta;

    pthread_mutex_lock(&mMutex);

    rc = validateCaptureRequest(request);
    if (rc != NO_ERROR) {
        ALOGE("%s: incoming request is not valid", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    uint32_t frameNumber = request->frame_number;

    rc = setFrameParameters(request->frame_number, request->settings);
    if (rc < 0) {
        ALOGE("%s: fail to set frame parameters", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    meta = request->settings;
    if (meta.exists(ANDROID_REQUEST_ID)) {
        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
        mCurrentRequestId = request_id;
        ALOGV("%s: Received request with id: %d",__func__, request_id);
    } else if (mFirstRequest || mCurrentRequestId == -1){
        ALOGE("%s: Unable to find request id field, \
                & no previous id available", __func__);
        pthread_mutex_unlock(&mMutex);
        return NAME_NOT_FOUND;
    } else {
        ALOGV("%s: Re-using old request id", __func__);
        request_id = mCurrentRequestId;
    }


    ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__,
                                    request->num_output_buffers);
    // Acquire all request buffers first
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        sp<Fence> acquireFence = new Fence(output.acquire_fence);

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            //Call function to store local copy of jpeg data for encode params.
            rc = getJpegSettings(request->settings);
            if (rc < 0) {
                ALOGE("%s: failed to get jpeg parameters", __func__);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }

        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
        if (rc != OK) {
            ALOGE("%s: fence wait failed %d", __func__, rc);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
    }

    /* Update pending request list and pending buffers map */
    pthread_mutex_lock(&mRequestLock);
    PendingRequestInfo pendingRequest;
    pendingRequest.frame_number = frameNumber;
    pendingRequest.num_buffers = request->num_output_buffers;
    pendingRequest.request_id = request_id;

    for (size_t i = 0; i < request->num_output_buffers; i++) {
        RequestedBufferInfo requestedBuf;
        requestedBuf.stream = request->output_buffers[i].stream;
        requestedBuf.buffer = NULL;
        pendingRequest.buffers.push_back(requestedBuf);

        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
    }
    mPendingRequestsList.push_back(pendingRequest);
    pthread_mutex_unlock(&mRequestLock);

    // Notify metadata channel we receive a request
    mMetadataChannel->request(NULL, frameNumber);

    // Call request on other streams
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;

        if (channel == NULL) {
            ALOGE("%s: invalid channel pointer for stream", __func__);
            continue;
        }

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            rc = channel->request(output.buffer, frameNumber, mJpegSettings);
        } else {
            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
                __LINE__, output.buffer, frameNumber);
            rc = channel->request(output.buffer, frameNumber);
        }
        if (rc < 0)
            ALOGE("%s: request failed", __func__);
    }

    mFirstRequest = false;

    //Block on conditional variable
    pthread_mutex_lock(&mRequestLock);
    mPendingRequest = 1;
    while (mPendingRequest == 1) {
        pthread_cond_wait(&mRequestCond, &mRequestLock);
    }
    pthread_mutex_unlock(&mRequestLock);

    pthread_mutex_unlock(&mMutex);
    return rc;
}
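
/* Note: processCaptureRequest() is intentionally blocking. After issuing the
 * per-stream requests it sets mPendingRequest = 1 and waits on mRequestCond;
 * captureResultCb() clears the flag and signals the condition once no stream
 * has max_buffers outstanding, which is what throttles the framework's request
 * rate to the pipeline depth. */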

/*===========================================================================
 * FUNCTION   : getMetadataVendorTagOps
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::getMetadataVendorTagOps(
                    vendor_tag_query_ops_t* /*ops*/)
{
    /* Enable locks when we eventually add Vendor Tags */
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::dump(int /*fd*/)
{
    /*Enable lock when we implement this function*/
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : captureResultCb
 *
 * DESCRIPTION: Callback handler for all capture result
 *              (streams, as well as metadata)
 *
 * PARAMETERS :
 *   @metadata : metadata information
 *   @buffer   : actual gralloc buffer to be returned to frameworks.
 *               NULL if metadata.
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    pthread_mutex_lock(&mRequestLock);

    if (metadata_buf) {
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;

        if (!frame_number_valid) {
            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
            mMetadataChannel->bufDone(metadata_buf);
            goto done_metadata;
        }
        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
                frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is metadata drop, how do we derive the timestamp of
            //dropped frames? For now, we fake the dropped timestamp by subtracting
            //from the reported timestamp
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time, i->request_id);
                // Return metadata buffer
                mMetadataChannel->bufDone(metadata_buf);
            }
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                if (!result_buffers) {
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }


done_metadata:
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map.
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number)
            i++;
        if (i == mPendingRequestsList.end()) {
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            ALOGV("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                                __func__, buffer, frame_number);
                    }
                }
            }
        }
    }

    pthread_mutex_unlock(&mRequestLock);
    return;
}

/*===========================================================================
 * FUNCTION   : translateCbMetadataToResultMetadata
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);

    uint8_t  *ae_state =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);

    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

    int64_t  *sensorFrameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);

    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    mMetadataResponse.iso_speed = *sensorSensitivity;
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    resultMetadata = camMetadata.release();
    return resultMetadata;
}

/*===========================================================================
 * FUNCTION   : convertToRegions
 *
 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
 *
 * PARAMETERS :
 *   @rect   : cam_rect_t struct to convert
 *   @region : int32_t destination array
 *   @weight : if we are converting from cam_area_t, weight is valid
 *             else weight = -1
 *
 *==========================================================================*/
void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
    region[0] = rect.left;
    region[1] = rect.top;
    region[2] = rect.left + rect.width;
    region[3] = rect.top + rect.height;
    if (weight > -1) {
        region[4] = weight;
    }
}
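
/* Note: convertToRegions() emits the [xmin, ymin, xmax, ymax, weight] layout
 * the framework expects for the *_REGIONS tags. For example, a cam_rect_t of
 * {left=100, top=200, width=50, height=60} with weight 1 becomes
 * {100, 200, 150, 260, 1}; with weight == -1 only the first four entries are
 * written. */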

/*===========================================================================
 * FUNCTION   : convertFromRegions
 *
 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination to fill in
 *   @settings : capture request settings holding the region entry
 *   @tag      : metadata tag of the region entry
 *
 *==========================================================================*/
void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
                                                   const camera_metadata_t *settings,
                                                   uint32_t tag){
    CameraMetadata frame_settings;
    frame_settings = settings;
    int32_t x_min = frame_settings.find(tag).data.i32[0];
    int32_t y_min = frame_settings.find(tag).data.i32[1];
    int32_t x_max = frame_settings.find(tag).data.i32[2];
    int32_t y_max = frame_settings.find(tag).data.i32[3];
    roi->weight = frame_settings.find(tag).data.i32[4];
    roi->rect.left = x_min;
    roi->rect.top = y_min;
    roi->rect.width = x_max - x_min;
    roi->rect.height = y_max - y_min;
}
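
/* Note: convertFromRegions() is the inverse mapping of convertToRegions(): it
 * reads the five-element [xmin, ymin, xmax, ymax, weight] entry for the given
 * tag and rebuilds a cam_area_t whose rect stores left/top plus
 * width = xmax - xmin and height = ymax - ymin. */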
1346
1347/*===========================================================================
1348 * FUNCTION   : resetIfNeededROI
1349 *
1350 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1351 *              crop region
1352 *
1353 * PARAMETERS :
1354 *   @roi       : cam_area_t struct to resize
1355 *   @scalerCropRegion : cam_crop_region_t region to compare against
1356 *
1357 *
1358 *==========================================================================*/
1359bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1360                                                 const cam_crop_region_t* scalerCropRegion)
1361{
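    // First reject ROIs that lie completely outside the crop region, then
    // clamp the remaining ROI edges so the rectangle fits inside the crop.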
1362    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1363    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1364    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
1365    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
1366    if ((roi_x_max < scalerCropRegion->left) ||
1367        (roi_y_max < scalerCropRegion->top)  ||
1368        (roi->rect.left > crop_x_max) ||
1369        (roi->rect.top > crop_y_max)){
1370        return false;
1371    }
1372    if (roi->rect.left < scalerCropRegion->left) {
1373        roi->rect.left = scalerCropRegion->left;
1374    }
1375    if (roi->rect.top < scalerCropRegion->top) {
1376        roi->rect.top = scalerCropRegion->top;
1377    }
1378    if (roi_x_max > crop_x_max) {
1379        roi_x_max = crop_x_max;
1380    }
1381    if (roi_y_max > crop_y_max) {
1382        roi_y_max = crop_y_max;
1383    }
1384    roi->rect.width = roi_x_max - roi->rect.left;
1385    roi->rect.height = roi_y_max - roi->rect.top;
1386    return true;
1387}
1388
1389/*===========================================================================
1390 * FUNCTION   : convertLandmarks
1391 *
1392 * DESCRIPTION: helper method to extract the landmarks from face detection info
1393 *
1394 * PARAMETERS :
1395 *   @face      : cam_face_detection_info_t to read the landmarks from
1396 *   @landmarks : int32_t destination array
1397 *
1398 *
1399 *==========================================================================*/
1400void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1401{
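    // Landmark layout expected by ANDROID_STATISTICS_FACE_LANDMARKS:
    // [leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY]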
1402    landmarks[0] = face.left_eye_center.x;
1403    landmarks[1] = face.left_eye_center.y;
1404    landmarks[2] = face.right_eye_center.x;
1405    landmarks[3] = face.right_eye_center.y;
1406    landmarks[4] = face.mouth_center.x;
1407    landmarks[5] = face.mouth_center.y;
1408}
1409
1410#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1411/*===========================================================================
1412 * FUNCTION   : initCapabilities
1413 *
1414 * DESCRIPTION: initialize camera capabilities in static data struct
1415 *
1416 * PARAMETERS :
1417 *   @cameraId  : camera Id
1418 *
1419 * RETURN     : int32_t type of status
1420 *              NO_ERROR  -- success
1421 *              non-zero failure code
1422 *==========================================================================*/
1423int QCamera3HardwareInterface::initCapabilities(int cameraId)
1424{
1425    int rc = 0;
1426    mm_camera_vtbl_t *cameraHandle = NULL;
1427    QCamera3HeapMemory *capabilityHeap = NULL;
1428
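    // Error handling below uses a label ladder so each successfully completed
    // step (open, heap allocation, buffer mapping) is undone in reverse order.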
1429    cameraHandle = camera_open(cameraId);
1430    if (!cameraHandle) {
1431        ALOGE("%s: camera_open failed", __func__);
1432        rc = -1;
1433        goto open_failed;
1434    }
1435
1436    capabilityHeap = new QCamera3HeapMemory();
1437    if (capabilityHeap == NULL) {
1438        ALOGE("%s: creation of capabilityHeap failed", __func__);
1439        goto heap_creation_failed;
1440    }
1441    /* Allocate memory for capability buffer */
1442    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1443    if(rc != OK) {
1444        ALOGE("%s: No memory for capability", __func__);
1445        goto allocate_failed;
1446    }
1447
1448    /* Map memory for capability buffer */
1449    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1450    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1451                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1452                                capabilityHeap->getFd(0),
1453                                sizeof(cam_capability_t));
1454    if(rc < 0) {
1455        ALOGE("%s: failed to map capability buffer", __func__);
1456        goto map_failed;
1457    }
1458
1459    /* Query Capability */
1460    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1461    if(rc < 0) {
1462        ALOGE("%s: failed to query capability",__func__);
1463        goto query_failed;
1464    }
1465    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1466    if (!gCamCapability[cameraId]) {
1467        ALOGE("%s: out of memory", __func__);
1468        goto query_failed;
1469    }
1470    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1471                                        sizeof(cam_capability_t));
1472    rc = 0;
1473
1474query_failed:
1475    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1476                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1477map_failed:
1478    capabilityHeap->deallocate();
1479allocate_failed:
1480    delete capabilityHeap;
1481heap_creation_failed:
1482    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1483    cameraHandle = NULL;
1484open_failed:
1485    return rc;
1486}
1487
1488/*===========================================================================
1489 * FUNCTION   : initParameters
1490 *
1491 * DESCRIPTION: initialize camera parameters
1492 *
1493 * PARAMETERS :
1494 *
1495 * RETURN     : int32_t type of status
1496 *              NO_ERROR  -- success
1497 *              non-zero failure code
1498 *==========================================================================*/
1499int QCamera3HardwareInterface::initParameters()
1500{
1501    int rc = 0;
1502
1503    //Allocate Set Param Buffer
1504    mParamHeap = new QCamera3HeapMemory();
1505    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1506    if(rc != OK) {
1507        rc = NO_MEMORY;
1508        ALOGE("Failed to allocate SETPARM Heap memory");
1509        delete mParamHeap;
1510        mParamHeap = NULL;
1511        return rc;
1512    }
1513
1514    //Map memory for parameters buffer
1515    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1516            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1517            mParamHeap->getFd(0),
1518            sizeof(parm_buffer_t));
1519    if(rc < 0) {
1520        ALOGE("%s:failed to map SETPARM buffer",__func__);
1521        rc = FAILED_TRANSACTION;
1522        mParamHeap->deallocate();
1523        delete mParamHeap;
1524        mParamHeap = NULL;
1525        return rc;
1526    }
1527
1528    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1529    return rc;
1530}
1531
1532/*===========================================================================
1533 * FUNCTION   : deinitParameters
1534 *
1535 * DESCRIPTION: de-initialize camera parameters
1536 *
1537 * PARAMETERS :
1538 *
1539 * RETURN     : NONE
1540 *==========================================================================*/
1541void QCamera3HardwareInterface::deinitParameters()
1542{
1543    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1544            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1545
1546    mParamHeap->deallocate();
1547    delete mParamHeap;
1548    mParamHeap = NULL;
1549
1550    mParameters = NULL;
1551}
1552
1553/*===========================================================================
1554 * FUNCTION   : calcMaxJpegSize
1555 *
1556 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1557 *
1558 * PARAMETERS :
1559 *
1560 * RETURN     : max_jpeg_size
1561 *==========================================================================*/
1562int QCamera3HardwareInterface::calcMaxJpegSize()
1563{
1564    int32_t max_jpeg_size = 0;
1565    int temp_width, temp_height;
1566    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1567        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1568        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1569        if (temp_width * temp_height > max_jpeg_size ) {
1570            max_jpeg_size = temp_width * temp_height;
1571        }
1572    }
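    // Worst-case JPEG size heuristic: 1.5 bytes per pixel of the largest
    // picture size, plus room for the trailing camera3_jpeg_blob_t header.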
1573    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1574    return max_jpeg_size;
1575}
1576
1577/*===========================================================================
1578 * FUNCTION   : initStaticMetadata
1579 *
1580 * DESCRIPTION: initialize the static metadata
1581 *
1582 * PARAMETERS :
1583 *   @cameraId  : camera Id
1584 *
1585 * RETURN     : int32_t type of status
1586 *              0  -- success
1587 *              non-zero failure code
1588 *==========================================================================*/
1589int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1590{
1591    int rc = 0;
1592    CameraMetadata staticInfo;
1593    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1594    /*HAL 3 only*/
1595    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1596                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1597
1598    /*hard coded for now but this should come from sensor*/
1599    float min_focus_distance;
1600    if(facingBack){
1601        min_focus_distance = 10;
1602    } else {
1603        min_focus_distance = 0;
1604    }
1605    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1606                    &min_focus_distance, 1);
1607
1608    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1609                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1610
1611    /*should be using focal lengths but sensor doesn't provide that info now*/
1612    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1613                      &gCamCapability[cameraId]->focal_length,
1614                      1);
1615
1616    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1617                      gCamCapability[cameraId]->apertures,
1618                      gCamCapability[cameraId]->apertures_count);
1619
1620    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1621                gCamCapability[cameraId]->filter_densities,
1622                gCamCapability[cameraId]->filter_densities_count);
1623
1624
1625    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1626                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1627                      gCamCapability[cameraId]->optical_stab_modes_count);
1628
1629    staticInfo.update(ANDROID_LENS_POSITION,
1630                      gCamCapability[cameraId]->lens_position,
1631                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1632
1633    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1634                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1635    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1636                      lens_shading_map_size,
1637                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1638
1639    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map,
1640            sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float));
1641
1642    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1643                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1644    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1645            geo_correction_map_size,
1646            sizeof(geo_correction_map_size)/sizeof(int32_t));
1647
1648    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1649                       gCamCapability[cameraId]->geo_correction_map,
1650                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1651
1652    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1653            gCamCapability[cameraId]->sensor_physical_size, 2);
1654
1655    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1656            gCamCapability[cameraId]->exposure_time_range, 2);
1657
1658    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1659            &gCamCapability[cameraId]->max_frame_duration, 1);
1660
1661
1662    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1663                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1664
1665    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1666                                               gCamCapability[cameraId]->pixel_array_size.height};
1667    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1668                      pixel_array_size, 2);
1669
1670    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.width,
1671                                                gCamCapability[cameraId]->active_array_size.height};
1672
1673    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1674                      active_array_size, 2);
1675
1676    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1677            &gCamCapability[cameraId]->white_level, 1);
1678
1679    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1680            gCamCapability[cameraId]->black_level_pattern, 4);
1681
1682    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1683                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1684
1685    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1686                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1687
1688    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1689                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1690    /*hardcode 0 for now*/
1691    int32_t max_face_count = 0;
1692    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1693                      &max_face_count, 1);
1694
1695    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1696                      &gCamCapability[cameraId]->histogram_size, 1);
1697
1698    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1699            &gCamCapability[cameraId]->max_histogram_count, 1);
1700
1701    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1702                                                gCamCapability[cameraId]->sharpness_map_size.height};
1703
1704    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1705            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1706
1707    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1708            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1709
1710
1711    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1712                      &gCamCapability[cameraId]->raw_min_duration,
1713                       1);
1714
1715    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888};
1716    int scalar_formats_count = 1;
1717    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1718                      scalar_formats,
1719                      scalar_formats_count);
1720
1721    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1722    makeTable(gCamCapability[cameraId]->supported_sizes_tbl,
1723              gCamCapability[cameraId]->supported_sizes_tbl_cnt,
1724              available_processed_sizes);
1725    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1726                available_processed_sizes,
1727                (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2);
1728
1729    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1730    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1731                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1732                 available_fps_ranges);
1733    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1734            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1735
1736    camera_metadata_rational exposureCompensationStep = {
1737            gCamCapability[cameraId]->exp_compensation_step.numerator,
1738            gCamCapability[cameraId]->exp_compensation_step.denominator};
1739    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1740                      &exposureCompensationStep, 1);
1741
1742    /*TO DO*/
1743    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1744    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1745                      availableVstabModes, sizeof(availableVstabModes));
1746
1747    /*HAL 1 and HAL 3 common*/
1748    float maxZoom = 10;
1749    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1750            &maxZoom, 1);
1751
1752    int32_t max3aRegions = 1;
1753    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1754            &max3aRegions, 1);
1755
1756    uint8_t availableFaceDetectModes[] = {
1757            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1758    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1759                      availableFaceDetectModes,
1760                      sizeof(availableFaceDetectModes));
1761
1762    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1763                                       gCamCapability[cameraId]->raw_dim.height};
1764    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1765                      raw_size,
1766                      sizeof(raw_size)/sizeof(uint32_t));
1767
1768    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1769                                                        gCamCapability[cameraId]->exposure_compensation_max};
1770    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1771            exposureCompensationRange,
1772            sizeof(exposureCompensationRange)/sizeof(int32_t));
1773
1774    uint8_t lensFacing = (facingBack) ?
1775            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1776    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1777
1778    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
1779    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1780              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1781              available_jpeg_sizes);
1782    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1783                available_jpeg_sizes,
1784                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1785
1786    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1787                      available_thumbnail_sizes,
1788                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1789
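    // Same worst-case JPEG size heuristic as calcMaxJpegSize(), computed here
    // directly for the given cameraId.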
1790    int32_t max_jpeg_size = 0;
1791    int temp_width, temp_height;
1792    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1793        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1794        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1795        if (temp_width * temp_height > max_jpeg_size ) {
1796            max_jpeg_size = temp_width * temp_height;
1797        }
1798    }
1799    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1800    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1801                      &max_jpeg_size, 1);
1802
1803    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1804    int32_t size = 0;
1805    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1806        int val = lookupFwkName(EFFECT_MODES_MAP,
1807                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1808                                   gCamCapability[cameraId]->supported_effects[i]);
1809        if (val != NAME_NOT_FOUND) {
1810            avail_effects[size] = (uint8_t)val;
1811            size++;
1812        }
1813    }
1814    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1815                      avail_effects,
1816                      size);
1817
1818    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1819    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1820    int32_t supported_scene_modes_cnt = 0;
1821    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1822        int val = lookupFwkName(SCENE_MODES_MAP,
1823                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1824                                gCamCapability[cameraId]->supported_scene_modes[i]);
1825        if (val != NAME_NOT_FOUND) {
1826            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1827            supported_indexes[supported_scene_modes_cnt] = i;
1828            supported_scene_modes_cnt++;
1829        }
1830    }
1831
1832    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1833                      avail_scene_modes,
1834                      supported_scene_modes_cnt);
1835
1836    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1837    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1838                      supported_scene_modes_cnt,
1839                      scene_mode_overrides,
1840                      supported_indexes);
1841    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
1842                      scene_mode_overrides,
1843                      supported_scene_modes_cnt*3);
1844
1845    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
1846    size = 0;
1847    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
1848        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
1849                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
1850                                 gCamCapability[cameraId]->supported_antibandings[i]);
1851        if (val != NAME_NOT_FOUND) {
1852            avail_antibanding_modes[size] = (uint8_t)val;
1853            size++;
1854        }
1855
1856    }
1857    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1858                      avail_antibanding_modes,
1859                      size);
1860
1861    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
1862    size = 0;
1863    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
1864        int val = lookupFwkName(FOCUS_MODES_MAP,
1865                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
1866                                gCamCapability[cameraId]->supported_focus_modes[i]);
1867        if (val != NAME_NOT_FOUND) {
1868            avail_af_modes[size] = (uint8_t)val;
1869            size++;
1870        }
1871    }
1872    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1873                      avail_af_modes,
1874                      size);
1875
1876    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
1877    size = 0;
1878    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
1879        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1880                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1881                                    gCamCapability[cameraId]->supported_white_balances[i]);
1882        if (val != NAME_NOT_FOUND) {
1883            avail_awb_modes[size] = (uint8_t)val;
1884            size++;
1885        }
1886    }
1887    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1888                      avail_awb_modes,
1889                      size);
1890
1891    uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX];
1892    size = 0;
1893    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) {
1894        int val = lookupFwkName(FLASH_MODES_MAP,
1895                                sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
1896                                gCamCapability[cameraId]->supported_flash_modes[i]);
1897        if (val != NAME_NOT_FOUND) {
1898            avail_flash_modes[size] = (uint8_t)val;
1899            size++;
1900        }
1901    }
1902    uint8_t flashAvailable = 0;
1903    if (size > 1) {
1904        //flash is supported
1905        flashAvailable = 1;
1906    }
1907    staticInfo.update(ANDROID_FLASH_MODE,
1908                      avail_flash_modes,
1909                      size);
1910
1911    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
1912            &flashAvailable, 1);
1913
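    // NOTE: avail_ae_modes holds up to 5 entries; this assumes the backend
    // reports at most two base AE modes, since up to three flash-related AE
    // modes may be appended below when a flash unit is available.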
1914    uint8_t avail_ae_modes[5];
1915    size = 0;
1916    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
1917        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
1918        size++;
1919    }
1920    if (flashAvailable) {
1921        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
1922        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
1923        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
1924    }
1925    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1926                      avail_ae_modes,
1927                      size);
1928    size = 0;
1929    int32_t avail_sensitivities[CAM_ISO_MODE_MAX];
1930    for (int i = 0; i < gCamCapability[cameraId]->supported_iso_modes_cnt; i++) {
1931        int32_t sensitivity = getSensorSensitivity(gCamCapability[cameraId]->supported_iso_modes[i]);
1932        if (sensitivity != -1) {
1933            avail_sensitivities[size] = sensitivity;
1934            size++;
1935        }
1936    }
1937    staticInfo.update(ANDROID_SENSOR_INFO_AVAILABLE_SENSITIVITIES,
1938                      avail_sensitivities,
1939                      size);
1940
1941    gStaticMetadata[cameraId] = staticInfo.release();
1942    return rc;
1943}
1944
1945/*===========================================================================
1946 * FUNCTION   : makeTable
1947 *
1948 * DESCRIPTION: make a table of sizes
1949 *
1950 * PARAMETERS :
1951 *   @dimTable  : source table of cam_dimension_t entries (@size entries)
1952 *   @sizeTable : output array of flattened [width, height] pairs
1953 *==========================================================================*/
1954void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
1955                                          int32_t* sizeTable)
1956{
1957    int j = 0;
1958    for (int i = 0; i < size; i++) {
1959        sizeTable[j] = dimTable[i].width;
1960        sizeTable[j+1] = dimTable[i].height;
1961        j+=2;
1962    }
1963}
1964
1965/*===========================================================================
1966 * FUNCTION   : makeFPSTable
1967 *
1968 * DESCRIPTION: make a table of fps ranges
1969 *
1970 * PARAMETERS :
1971 *   @fpsTable : source fps ranges (@size entries); @fpsRangesTable : output [min_fps, max_fps] pairs
1972 *==========================================================================*/
1973void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
1974                                          int32_t* fpsRangesTable)
1975{
1976    int j = 0;
1977    for (int i = 0; i < size; i++) {
1978        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
1979        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
1980        j+=2;
1981    }
1982}
1983
1984/*===========================================================================
1985 * FUNCTION   : makeOverridesList
1986 *
1987 * DESCRIPTION: make a list of scene mode overrides
1988 *
1989 * PARAMETERS :
1990 *   @overridesTable : backend scene mode overrides; @size : number of supported scene modes
1991 *   @overridesList  : output (ae, awb, af) triplets; @supported_indexes : backend indexes of the supported modes
1992 *==========================================================================*/
1993void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
1994                                                  uint8_t size, uint8_t* overridesList,
1995                                                  uint8_t* supported_indexes)
1996{
1997    /*daemon will give a list of overrides for all scene modes.
1998      However we should send the fwk only the overrides for the scene modes
1999      supported by the framework*/
2000    int j = 0, index = 0;
2001    for (int i = 0; i < size; i++) {
2002        index = supported_indexes[i];
2003        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
2004        overridesList[j+1] = (uint8_t)overridesTable[index].awb_mode;
2005        overridesList[j+2] = (uint8_t)overridesTable[index].af_mode;
2006        j+=3;
2007    }
2008}
2009
2010/*===========================================================================
2011 * FUNCTION   : getScalarFormat
2012 *
2013 * DESCRIPTION: convert the format to type recognized by framework
2014 *
2015 * PARAMETERS : format : the format from backend
2016 *
2017 * RETURN     : format recognized by framework
2018 *
2019 *==========================================================================*/
2020int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2021{
2022    int32_t halPixelFormat;
2023
2024    switch (format) {
2025    case CAM_FORMAT_YUV_420_NV12:
2026        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2027        break;
2028    case CAM_FORMAT_YUV_420_NV21:
2029        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2030        break;
2031    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2032        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2033        break;
2034    case CAM_FORMAT_YUV_420_YV12:
2035        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2036        break;
2037    case CAM_FORMAT_YUV_422_NV16:
2038    case CAM_FORMAT_YUV_422_NV61:
2039    default:
2040        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2041        break;
2042    }
2043    return halPixelFormat;
2044}
2045
2046/*===========================================================================
2047 * FUNCTION   : getSensorSensitivity
2048 *
2049 * DESCRIPTION: convert iso_mode to an integer value
2050 *
2051 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2052 *
2053 * RETURN     : sensitivity supported by sensor
2054 *
2055 *==========================================================================*/
2056int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2057{
2058    int32_t sensitivity;
2059
2060    switch (iso_mode) {
2061    case CAM_ISO_MODE_100:
2062        sensitivity = 100;
2063        break;
2064    case CAM_ISO_MODE_200:
2065        sensitivity = 200;
2066        break;
2067    case CAM_ISO_MODE_400:
2068        sensitivity = 400;
2069        break;
2070    case CAM_ISO_MODE_800:
2071        sensitivity = 800;
2072        break;
2073    case CAM_ISO_MODE_1600:
2074        sensitivity = 1600;
2075        break;
2076    default:
2077        sensitivity = -1;
2078        break;
2079    }
2080    return sensitivity;
2081}
2082
2083
2084/*===========================================================================
2085 * FUNCTION   : AddSetParmEntryToBatch
2086 *
2087 * DESCRIPTION: add set parameter entry into batch
2088 *
2089 * PARAMETERS :
2090 *   @p_table     : ptr to parameter buffer
2091 *   @paramType   : parameter type
2092 *   @paramLength : length of parameter value
2093 *   @paramValue  : ptr to parameter value
2094 *
2095 * RETURN     : int32_t type of status
2096 *              NO_ERROR  -- success
2097 *              non-zero failure code
2098 *==========================================================================*/
2099int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2100                                                          cam_intf_parm_type_t paramType,
2101                                                          uint32_t paramLength,
2102                                                          void *paramValue)
2103{
2104    int position = paramType;
2105    int current, next;
2106
2107    /*************************************************************************
2108    *                 Code to take care of linking next flags                *
2109    *************************************************************************/
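    // Flagged entries form a singly linked list sorted by parameter ID
    // (head: first_flagged_entry); the logic below inserts 'position'
    // at the correct spot in that list.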
2110    current = GET_FIRST_PARAM_ID(p_table);
2111    if (position == current){
2112        //DO NOTHING
2113    } else if (position < current){
2114        SET_NEXT_PARAM_ID(position, p_table, current);
2115        SET_FIRST_PARAM_ID(p_table, position);
2116    } else {
2117        /* Search for the position in the linked list where we need to slot in*/
2118        while (position > GET_NEXT_PARAM_ID(current, p_table))
2119            current = GET_NEXT_PARAM_ID(current, p_table);
2120
2121        /*If node already exists no need to alter linking*/
2122        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2123            next = GET_NEXT_PARAM_ID(current, p_table);
2124            SET_NEXT_PARAM_ID(current, p_table, position);
2125            SET_NEXT_PARAM_ID(position, p_table, next);
2126        }
2127    }
2128
2129    /*************************************************************************
2130    *                   Copy contents into entry                             *
2131    *************************************************************************/
2132
2133    if (paramLength > sizeof(parm_type_t)) {
2134        ALOGE("%s:Size of input larger than max entry size",__func__);
2135        return BAD_VALUE;
2136    }
2137    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2138    return NO_ERROR;
2139}
2140
2141/*===========================================================================
2142 * FUNCTION   : lookupFwkName
2143 *
2144 * DESCRIPTION: In case the enum is not the same in fwk and backend,
2145 *              make sure the parameter is correctly propagated
2146 *
2147 * PARAMETERS  :
2148 *   @arr      : map between the two enums
2149 *   @len      : len of the map
2150 *   @hal_name : name of the hal_parm to map
2151 *
2152 * RETURN     : int type of status
2153 *              fwk_name  -- success
2154 *              NAME_NOT_FOUND  -- failure
2155 *==========================================================================*/
2156int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2157                                             int len, int hal_name)
2158{
2159
2160    for (int i = 0; i < len; i++) {
2161        if (arr[i].hal_name == hal_name)
2162            return arr[i].fwk_name;
2163    }
2164
2165    /* Not able to find matching framework type is not necessarily
2166     * an error case. This happens when mm-camera supports more attributes
2167     * than the frameworks do */
2168    ALOGD("%s: Cannot find matching framework type", __func__);
2169    return NAME_NOT_FOUND;
2170}
2171
2172/*===========================================================================
2173 * FUNCTION   : lookupHalName
2174 *
2175 * DESCRIPTION: In case the enum is not the same in fwk and backend,
2176 *              make sure the parameter is correctly propagated
2177 *
2178 * PARAMETERS  :
2179 *   @arr      : map between the two enums
2180 *   @len      : len of the map
2181 *   @fwk_name : name of the framework parameter to map
2182 *
2183 * RETURN     : int32_t type of status
2184 *              hal_name  -- success
2185 *              NAME_NOT_FOUND  -- failure
2186 *==========================================================================*/
2187int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2188                                             int len, int fwk_name)
2189{
2190    for (int i = 0; i < len; i++) {
2191       if (arr[i].fwk_name == fwk_name)
2192           return arr[i].hal_name;
2193    }
2194    ALOGE("%s: Cannot find matching hal type", __func__);
2195    return NAME_NOT_FOUND;
2196}
2197
2198/*===========================================================================
2199 * FUNCTION   : getCamInfo
2200 *
2201 * DESCRIPTION: query camera capabilities
2202 *
2203 * PARAMETERS :
2204 *   @cameraId  : camera Id
2205 *   @info      : camera info struct to be filled in with camera capabilities
2206 *
2207 * RETURN     : int32_t type of status
2208 *              NO_ERROR  -- success
2209 *              non-zero failure code
2210 *==========================================================================*/
2211int QCamera3HardwareInterface::getCamInfo(int cameraId,
2212                                    struct camera_info *info)
2213{
2214    int rc = 0;
2215
2216    if (NULL == gCamCapability[cameraId]) {
2217        rc = initCapabilities(cameraId);
2218        if (rc < 0) {
2219            //pthread_mutex_unlock(&g_camlock);
2220            return rc;
2221        }
2222    }
2223
2224    if (NULL == gStaticMetadata[cameraId]) {
2225        rc = initStaticMetadata(cameraId);
2226        if (rc < 0) {
2227            return rc;
2228        }
2229    }
2230
2231    switch(gCamCapability[cameraId]->position) {
2232    case CAM_POSITION_BACK:
2233        info->facing = CAMERA_FACING_BACK;
2234        break;
2235
2236    case CAM_POSITION_FRONT:
2237        info->facing = CAMERA_FACING_FRONT;
2238        break;
2239
2240    default:
2241        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2242        rc = -1;
2243        break;
2244    }
2245
2246
2247    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2248    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2249    info->static_camera_characteristics = gStaticMetadata[cameraId];
2250
2251    return rc;
2252}
2253
2254/*===========================================================================
2255 * FUNCTION   : translateCapabilityToMetadata
2256 *
2257 * DESCRIPTION: translate static capabilities into default request settings
2258 *              (camera_metadata_t) for the given request template
2259 * PARAMETERS : @type : the request template type
2260 *
2261 *
2262 * RETURN     : success: camera_metadata_t*
2263 *              failure: NULL
2264 *
2265 *==========================================================================*/
2266camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2267{
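    // Default request settings are built once per template type and cached in
    // mDefaultMetadata; subsequent calls return the cached copy under mMutex.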
2268    pthread_mutex_lock(&mMutex);
2269
2270    if (mDefaultMetadata[type] != NULL) {
2271        pthread_mutex_unlock(&mMutex);
2272        return mDefaultMetadata[type];
2273    }
2274    //first time we are handling this request
2275    //fill up the metadata structure using the wrapper class
2276    CameraMetadata settings;
2277    //translate from cam_capability_t to camera_metadata_tag_t
2278    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2279    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2280
2281    /*control*/
2282
2283    uint8_t controlIntent = 0;
2284    switch (type) {
2285      case CAMERA3_TEMPLATE_PREVIEW:
2286        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2287        break;
2288      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2289        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2290        break;
2291      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2292        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2293        break;
2294      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2295        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2296        break;
2297      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2298        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2299        break;
2300      default:
2301        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2302        break;
2303    }
2304    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2305
2306    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2307            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2308
2309    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2310    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2311
2312    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2313    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2314
2315    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2316    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2317
2318    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2319    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2320
2321    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2322    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2323
2324    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2325    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2326
2327    static uint8_t focusMode;
2328    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2329        ALOGV("%s: Setting focus mode to auto", __func__);
2330        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2331    } else {
2332        ALOGV("%s: Setting focus mode to off", __func__);
2333        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2334    }
2335    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2336
2337    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2338    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2339
2340    /*flash*/
2341    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2342    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2343
2344
2345    /* lens */
2346    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2347    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2348
2349    if (gCamCapability[mCameraId]->filter_densities_count) {
2350        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2351        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2352                        1);
2353    }
2354
2355    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2356    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2357
2358    mDefaultMetadata[type] = settings.release();
2359
2360    pthread_mutex_unlock(&mMutex);
2361    return mDefaultMetadata[type];
2362}
2363
2364/*===========================================================================
2365 * FUNCTION   : setFrameParameters
2366 *
2367 * DESCRIPTION: set parameters per frame as requested in the metadata from
2368 *              framework
2369 *
2370 * PARAMETERS :
2371 *   @frame_id  : frame number of the current request
2372 *   @settings  : frame settings information from framework
2373 *
2374 * RETURN     : success: NO_ERROR
2375 *              failure: non-zero error code
2376 *==========================================================================*/
2377int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2378                                                  const camera_metadata_t *settings)
2379{
2380    /*translate from camera_metadata_t type to parm_type_t*/
2381    int rc = 0;
2382    if (settings == NULL && mFirstRequest) {
2383        /*settings cannot be null for the first request*/
2384        return BAD_VALUE;
2385    }
2386
2387    int32_t hal_version = CAM_HAL_V3;
2388
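    // Reset the parameter batch: zero the buffer and mark the flagged-entry
    // list as empty (first_flagged_entry == CAM_INTF_PARM_MAX).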
2389    memset(mParameters, 0, sizeof(parm_buffer_t));
2390    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2391    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2392                sizeof(hal_version), &hal_version);
2393
2394    /*we need to update the frame number in the parameters*/
2395    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2396                                sizeof(frame_id), &frame_id);
2397    if (rc < 0) {
2398        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2399        return BAD_VALUE;
2400    }
2401
2402    if(settings != NULL){
2403        rc = translateMetadataToParameters(settings);
2404    }
2405    /*set the parameters to backend*/
2406    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2407    return rc;
2408}
2409
2410/*===========================================================================
2411 * FUNCTION   : translateMetadataToParameters
2412 *
2413 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2414 *
2415 *
2416 * PARAMETERS :
2417 *   @settings  : frame settings information from framework
2418 *
2419 *
2420 * RETURN     : success: NO_ERROR
2421 *              failure: non-zero error code
2422 *==========================================================================*/
2423int QCamera3HardwareInterface::translateMetadataToParameters
2424                                  (const camera_metadata_t *settings)
2425{
2426    int rc = 0;
2427    CameraMetadata frame_settings;
2428    frame_settings = settings;
2429
2430
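    // For each framework key present in the request, translate the value to the
    // matching CAM_INTF parameter and add it to the batch via AddSetParmEntryToBatch().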
2431    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2432        int32_t antibandingMode =
2433            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2434        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2435                sizeof(antibandingMode), &antibandingMode);
2436    }
2437
2438    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2439        int32_t expCompensation = frame_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2440        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2441          sizeof(expCompensation), &expCompensation);
2442    }
2443
2444    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2445        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2446        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2447                sizeof(aeLock), &aeLock);
2448    }
2449
2450    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2451        cam_fps_range_t fps_range;
2452        fps_range.min_fps =
2453            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2454        fps_range.max_fps =
2455            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2456        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2457                sizeof(fps_range), &fps_range);
2458    }
2459
2460    float focalDistance = -1.0;
2461    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2462        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2463        rc = AddSetParmEntryToBatch(mParameters,
2464                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2465                sizeof(focalDistance), &focalDistance);
2466    }
2467
2468    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2469        uint8_t fwk_focusMode =
2470            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2471        uint8_t focusMode;
2472        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2473            focusMode = CAM_FOCUS_MODE_INFINITY;
2474        } else {
2475            focusMode = lookupHalName(FOCUS_MODES_MAP,
2476                    sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2477                    fwk_focusMode);
2478        }
2479        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2480                sizeof(focusMode), &focusMode);
2481    }
2482
2483    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2484        uint8_t awbLock =
2485            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2486        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2487                sizeof(awbLock), &awbLock);
2488    }
2489
2490    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2491        uint8_t fwk_whiteLevel =
2492            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2493        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2494                sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2495                fwk_whiteLevel);
2496        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2497                sizeof(whiteLevel), &whiteLevel);
2498    }
2499
2500    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2501        uint8_t fwk_effectMode =
2502            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2503        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2504                sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2505                fwk_effectMode);
2506        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2507                sizeof(effectMode), &effectMode);
2508    }
2509
2510    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2511        uint8_t fwk_aeMode =
2512            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2513        uint8_t aeMode;
2514        int32_t redeye;
2515        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2516            aeMode = CAM_AE_MODE_OFF;
2517        } else {
2518            aeMode = CAM_AE_MODE_ON;
2519        }
2520        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2521            redeye = 1;
2522        } else {
2523            redeye = 0;
2524        }
2525        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2526                sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
2527                fwk_aeMode);
2528        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2529                sizeof(aeMode), &aeMode);
2530        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2531                sizeof(flashMode), &flashMode);
2532        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2533                sizeof(redeye), &redeye);
2534    }
2535
2536    if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) {
2537        int32_t metaFrameNumber =
2538            frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0];
2539        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2540                sizeof(metaFrameNumber), &metaFrameNumber);
2541    }
2542
2543    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2544        uint8_t colorCorrectMode =
2545            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2546        rc =
2547            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2548                    sizeof(colorCorrectMode), &colorCorrectMode);
2549    }
2550    cam_trigger_t aecTrigger;
2551    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2552    aecTrigger.trigger_id = -1;
2553    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2554        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2555        aecTrigger.trigger =
2556            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2557        aecTrigger.trigger_id =
2558            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2559    }
2560    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2561                                sizeof(aecTrigger), &aecTrigger);
2562
2563    /*af_trigger must come with a trigger id*/
2564    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2565        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2566        cam_trigger_t af_trigger;
2567        af_trigger.trigger =
2568            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2569        af_trigger.trigger_id =
2570            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2571        rc = AddSetParmEntryToBatch(mParameters,
2572                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2573    }
2574
2575    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2576        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2577        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2578                sizeof(metaMode), &metaMode);
2579    }
2580
2581    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2582        int32_t demosaic =
2583            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2584        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2585                sizeof(demosaic), &demosaic);
2586    }
2587
2588    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2589        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2590        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE,
2591                sizeof(edgeMode), &edgeMode);
2592    }
2593
2594    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2595        int32_t edgeStrength =
2596            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2597        rc = AddSetParmEntryToBatch(mParameters,
2598                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2599    }
2600
2601    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2602        uint8_t flashMode =
2603            frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2604        rc = AddSetParmEntryToBatch(mParameters,
2605                CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode);
2606    }
2607
2608    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2609        uint8_t flashPower =
2610            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2611        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2612                sizeof(flashPower), &flashPower);
2613    }
2614
2615    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2616        int64_t flashFiringTime =
2617            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2618        rc = AddSetParmEntryToBatch(mParameters,
2619                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2620    }
2621
2622    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2623        uint8_t geometricMode =
2624            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2625        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2626                sizeof(geometricMode), &geometricMode);
2627    }
2628
2629    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2630        uint8_t geometricStrength =
2631            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2632        rc = AddSetParmEntryToBatch(mParameters,
2633                CAM_INTF_META_GEOMETRIC_STRENGTH,
2634                sizeof(geometricStrength), &geometricStrength);
2635    }
2636
2637    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2638        uint8_t hotPixelMode =
2639            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2640        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2641                sizeof(hotPixelMode), &hotPixelMode);
2642    }
2643
2644    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2645        float lensAperture =
2646            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2647        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2648                sizeof(lensAperture), &lensAperture);
2649    }
2650
2651    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2652        float filterDensity =
2653            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2654        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2655                sizeof(filterDensity), &filterDensity);
2656    }
2657
2658    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2659        float focalLength =
2660            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2661        rc = AddSetParmEntryToBatch(mParameters,
2662                CAM_INTF_META_LENS_FOCAL_LENGTH,
2663                sizeof(focalLength), &focalLength);
2664    }
2665
2666    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2667        uint8_t optStabMode =
2668            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2669        rc = AddSetParmEntryToBatch(mParameters,
2670                CAM_INTF_META_LENS_OPT_STAB_MODE,
2671                sizeof(optStabMode), &optStabMode);
2672    }
2673
2674    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2675        uint8_t noiseRedMode =
2676            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2677        rc = AddSetParmEntryToBatch(mParameters,
2678                CAM_INTF_META_NOISE_REDUCTION_MODE,
2679                sizeof(noiseRedMode), &noiseRedMode);
2680    }
2681
2682    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2683        uint8_t noiseRedStrength =
2684            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2685        rc = AddSetParmEntryToBatch(mParameters,
2686                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2687                sizeof(noiseRedStrength), &noiseRedStrength);
2688    }
2689
2690    cam_crop_region_t scalerCropRegion;
2691    bool scalerCropSet = false;
2692    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2693        scalerCropRegion.left =
2694            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2695        scalerCropRegion.top =
2696            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2697        scalerCropRegion.width =
2698            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2699        scalerCropRegion.height =
2700            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2701        rc = AddSetParmEntryToBatch(mParameters,
2702                CAM_INTF_META_SCALER_CROP_REGION,
2703                sizeof(scalerCropRegion), &scalerCropRegion);
2704        scalerCropSet = true;
2705    }
2706
2707    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2708        int64_t sensorExpTime =
2709            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2710        rc = AddSetParmEntryToBatch(mParameters,
2711                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2712                sizeof(sensorExpTime), &sensorExpTime);
2713    }
2714
2715    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2716        int64_t sensorFrameDuration =
2717            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2718        rc = AddSetParmEntryToBatch(mParameters,
2719                CAM_INTF_META_SENSOR_FRAME_DURATION,
2720                sizeof(sensorFrameDuration), &sensorFrameDuration);
2721    }
2722
2723    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2724        int32_t sensorSensitivity =
2725            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2726        rc = AddSetParmEntryToBatch(mParameters,
2727                CAM_INTF_META_SENSOR_SENSITIVITY,
2728                sizeof(sensorSensitivity), &sensorSensitivity);
2729    }
2730
2731    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
2732        uint8_t shadingMode =
2733            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
2734        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
2735                sizeof(shadingMode), &shadingMode);
2736    }
2737
2738    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
2739        uint8_t shadingStrength =
2740            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
2741        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
2742                sizeof(shadingStrength), &shadingStrength);
2743    }
2744
2745    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
2746        uint8_t facedetectMode =
2747            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
2748        rc = AddSetParmEntryToBatch(mParameters,
2749                CAM_INTF_META_STATS_FACEDETECT_MODE,
2750                sizeof(facedetectMode), &facedetectMode);
2751    }
2752
2753    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
2754        uint8_t histogramMode =
2755            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
2756        rc = AddSetParmEntryToBatch(mParameters,
2757                CAM_INTF_META_STATS_HISTOGRAM_MODE,
2758                sizeof(histogramMode), &histogramMode);
2759    }
2760
2761    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
2762        uint8_t sharpnessMapMode =
2763            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
2764        rc = AddSetParmEntryToBatch(mParameters,
2765                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
2766                sizeof(sharpnessMapMode), &sharpnessMapMode);
2767    }
2768
2769    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
2770        uint8_t tonemapMode =
2771            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
2772        rc = AddSetParmEntryToBatch(mParameters,
2773                CAM_INTF_META_TONEMAP_MODE,
2774                sizeof(tonemapMode), &tonemapMode);
2775    }
2776
2777    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2778        uint8_t captureIntent =
2779            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2780        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2781                sizeof(captureIntent), &captureIntent);
2782    }
2783
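    // 3A metering regions: when a crop region was also set in this request,
    // resetIfNeededROI() decides whether the ROI still makes sense inside that
    // crop, and the ROI is only forwarded to the backend when it does.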
2784    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
2785        cam_area_t roi;
2786        bool reset = true;
2787        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
2788        if (scalerCropSet) {
2789            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2790        }
2791        if (reset) {
2792            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
2793                    sizeof(roi), &roi);
2794        }
2795    }
2796
2797    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
2798        cam_area_t roi;
2799        bool reset = true;
2800        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
2801        if (scalerCropSet) {
2802            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2803        }
2804        if (reset) {
2805            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
2806                    sizeof(roi), &roi);
2807        }
2808    }
2809
2810    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
2811        cam_area_t roi;
2812        bool reset = true;
2813        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
2814        if (scalerCropSet) {
2815            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2816        }
2817        if (reset) {
2818            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
2819                    sizeof(roi), &roi);
2820        }
2821    }
2822    return rc;
2823}
2824
2825/*===========================================================================
2826 * FUNCTION   : getJpegSettings
2827 *
2828 * DESCRIPTION: save the jpeg settings in the HAL
2829 *
2830 *
2831 * PARAMETERS :
2832 *   @settings  : frame settings information from framework
2833 *
2834 *
2835 * RETURN     : success: NO_ERROR
2836 *              failure: NO_MEMORY (if the settings buffer cannot be allocated)
2837 *==========================================================================*/
2838int QCamera3HardwareInterface::getJpegSettings
2839                                  (const camera_metadata_t *settings)
2840{
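    // Release any JPEG settings cached from a previous request before parsing the new ones.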
2841    if (mJpegSettings) {
2842        if (mJpegSettings->gps_timestamp) {
2843            free(mJpegSettings->gps_timestamp);
2844            mJpegSettings->gps_timestamp = NULL;
2845        }
2846        if (mJpegSettings->gps_coordinates) {
2847            for (int i = 0; i < 3; i++) {
2848                free(mJpegSettings->gps_coordinates[i]);
2849                mJpegSettings->gps_coordinates[i] = NULL;
2850            }
2851        }
2852        free(mJpegSettings);
2853        mJpegSettings = NULL;
2854    }
2855    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
    if (mJpegSettings == NULL) {
        ALOGE("%s: Failed to allocate memory for jpeg settings", __func__);
        return NO_MEMORY;
    }
2856    CameraMetadata jpeg_settings;
2857    jpeg_settings = settings;
2858
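    // Each JPEG tag below falls back to a default (orientation 0, quality 85,
    // no thumbnail) when the framework does not supply it.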
2859    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
2860        mJpegSettings->jpeg_orientation =
2861            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
2862    } else {
2863        mJpegSettings->jpeg_orientation = 0;
2864    }
2865    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
2866        mJpegSettings->jpeg_quality =
2867            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
2868    } else {
2869        mJpegSettings->jpeg_quality = 85;
2870    }
2871    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
2872        mJpegSettings->thumbnail_size.width =
2873            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
2874        mJpegSettings->thumbnail_size.height =
2875            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
2876    } else {
2877        mJpegSettings->thumbnail_size.width = 0;
2878        mJpegSettings->thumbnail_size.height = 0;
2879    }
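    // GPS coordinates and timestamp are heap-allocated so that a NULL pointer
    // can signal "not provided" to the code that consumes these settings.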
2880    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
2881        for (int i = 0; i < 3; i++) {
2882            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double));
2883            *(mJpegSettings->gps_coordinates[i]) =
2884                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
2885        }
2886    } else {
2887       for (int i = 0; i < 3; i++) {
2888            mJpegSettings->gps_coordinates[i] = NULL;
2889        }
2890    }
2891
2892    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
2893        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t));
2894        *(mJpegSettings->gps_timestamp) =
2895            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
2896    } else {
2897        mJpegSettings->gps_timestamp = NULL;
2898    }
2899
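    // Copy the GPS processing method string and make sure it is NUL-terminated
    // (this assumes the destination buffer leaves room for the terminator).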
2900    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
2901        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
2902        for (int i = 0; i < len; i++) {
2903            mJpegSettings->gps_processing_method[i] =
2904                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
2905        }
2906        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
2907            mJpegSettings->gps_processing_method[len] = '\0';
2908        }
2909    } else {
2910        mJpegSettings->gps_processing_method[0] = '\0';
2911    }
2912
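    // If the request does not carry a sensitivity value, fall back to the ISO
    // cached from the most recent metadata (mMetadataResponse).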
2913    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2914        mJpegSettings->sensor_sensitivity =
2915            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2916    } else {
2917        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
2918    }
2919
2920    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2921        mJpegSettings->lens_focal_length =
2922            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2923    }
2924    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2925        mJpegSettings->exposure_compensation =
2926            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2927    }
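    // Per-camera constants and derived limits rather than per-request settings.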
2928    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
2929    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
2930    return 0;
2931}
2932
2933/*===========================================================================
2934 * FUNCTION   : captureResultCb
2935 *
2936 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
2937 *
2938 * PARAMETERS :
2939 *   @metadata : metadata buffer from mm-camera-interface; NULL for stream buffer callbacks
2940 *   @buffer : actual gralloc buffer to be returned to the framework. NULL if metadata.
2941 *   @frame_number : frame number of the capture request this result belongs to
 *   @userdata : opaque pointer back to the QCamera3HardwareInterface instance
2942 *
2943 * RETURN     : NONE
2944 *==========================================================================*/
2945void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
2946                camera3_stream_buffer_t *buffer,
2947                uint32_t frame_number, void *userdata)
2948{
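    // Static callback registered with the channels: recover the HAL instance
    // from userdata and forward to the member function of the same name.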
2949    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
2950    if (hw == NULL) {
2951        ALOGE("%s: Invalid hw %p", __func__, hw);
2952        return;
2953    }
2954
2955    hw->captureResultCb(metadata, buffer, frame_number);
2956    return;
2957}
2958
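/* The static functions below are the camera3_device_ops entry points of this
 * HAL: each one unwraps the QCamera3HardwareInterface instance stored in
 * device->priv, validates it, and forwards the call to the corresponding
 * member function. As an illustrative sketch only (assuming the usual HAL3
 * wiring, not necessarily the exact table this file uses), they would be
 * exposed to the framework through a camera3_device_ops_t table along the
 * lines of:
 *
 *     static camera3_device_ops_t ops = {
 *         .initialize                         = initialize,
 *         .configure_streams                  = configure_streams,
 *         .register_stream_buffers            = register_stream_buffers,
 *         .construct_default_request_settings = construct_default_request_settings,
 *         .process_capture_request            = process_capture_request,
 *         .get_metadata_vendor_tag_ops        = get_metadata_vendor_tag_ops,
 *         .dump                               = dump,
 *     };
 *
 * with camera3_device_t::ops pointed at this table when the camera is opened.
 */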
2959/*===========================================================================
2960 * FUNCTION   : initialize
2961 *
2962 * DESCRIPTION: Pass framework callback pointers to HAL
2963 *
2964 * PARAMETERS :
2965 *   @device       : camera3 device handle whose priv field holds this HAL instance
2966 *   @callback_ops : framework callback function pointers
2967 * RETURN     : Success : 0
2968 *              Failure: -ENODEV
2969 *==========================================================================*/
2970
2971int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
2972                                  const camera3_callback_ops_t *callback_ops)
2973{
2974    ALOGV("%s: E", __func__);
2975    QCamera3HardwareInterface *hw =
2976        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2977    if (!hw) {
2978        ALOGE("%s: NULL camera device", __func__);
2979        return -ENODEV;
2980    }
2981
2982    int rc = hw->initialize(callback_ops);
2983    ALOGV("%s: X", __func__);
2984    return rc;
2985}
2986
2987/*===========================================================================
2988 * FUNCTION   : configure_streams
2989 *
2990 * DESCRIPTION: Configure the output streams to be used by subsequent capture requests
2991 *
2992 * PARAMETERS :
2993 *   @device      : camera3 device handle
2994 *   @stream_list : set of streams to be configured
2995 * RETURN     : Success: 0
2996 *              Failure: -EINVAL (if stream configuration is invalid)
2997 *                       -ENODEV (fatal error)
2998 *==========================================================================*/
2999
3000int QCamera3HardwareInterface::configure_streams(
3001        const struct camera3_device *device,
3002        camera3_stream_configuration_t *stream_list)
3003{
3004    ALOGV("%s: E", __func__);
3005    QCamera3HardwareInterface *hw =
3006        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3007    if (!hw) {
3008        ALOGE("%s: NULL camera device", __func__);
3009        return -ENODEV;
3010    }
3011    int rc = hw->configureStreams(stream_list);
3012    ALOGV("%s: X", __func__);
3013    return rc;
3014}
3015
3016/*===========================================================================
3017 * FUNCTION   : register_stream_buffers
3018 *
3019 * DESCRIPTION: Register stream buffers with the device
3020 *
3021 * PARAMETERS :
3022 *   @device     : camera3 device handle
 *   @buffer_set : buffers to be registered for one configured stream
3023 * RETURN     : Success: 0
 *              Failure: -ENODEV (invalid device)
3024 *==========================================================================*/
3025int QCamera3HardwareInterface::register_stream_buffers(
3026        const struct camera3_device *device,
3027        const camera3_stream_buffer_set_t *buffer_set)
3028{
3029    ALOGV("%s: E", __func__);
3030    QCamera3HardwareInterface *hw =
3031        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3032    if (!hw) {
3033        ALOGE("%s: NULL camera device", __func__);
3034        return -ENODEV;
3035    }
3036    int rc = hw->registerStreamBuffers(buffer_set);
3037    ALOGV("%s: X", __func__);
3038    return rc;
3039}
3040
3041/*===========================================================================
3042 * FUNCTION   : construct_default_request_settings
3043 *
3044 * DESCRIPTION: Configure a settings buffer to meet the required use case
3045 *
3046 *   @device : camera3 device handle
3047 *   @type   : capture template type (e.g. CAMERA3_TEMPLATE_PREVIEW)
3048 *
3049 * RETURN     : Success: Return valid metadata
3050 *              Failure: Return NULL
3051 *==========================================================================*/
3052const camera_metadata_t* QCamera3HardwareInterface::
3053    construct_default_request_settings(const struct camera3_device *device,
3054                                        int type)
3055{
3056
3057    ALOGV("%s: E", __func__);
3058    camera_metadata_t* fwk_metadata = NULL;
3059    QCamera3HardwareInterface *hw =
3060        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3061    if (!hw) {
3062        ALOGE("%s: NULL camera device", __func__);
3063        return NULL;
3064    }
3065
3066    fwk_metadata = hw->translateCapabilityToMetadata(type);
3067
3068    ALOGV("%s: X", __func__);
3069    return fwk_metadata;
3070}
3071
3072/*===========================================================================
3073 * FUNCTION   : process_capture_request
3074 *
3075 * DESCRIPTION: Submit one capture request from the framework for processing
3076 *
3077 * PARAMETERS :
3078 *   @device  : camera3 device handle
3079 *   @request : capture request, including settings and output buffers
3080 * RETURN     : Success: 0
 *              Failure: -EINVAL (invalid device or request)
3081 *==========================================================================*/
3082int QCamera3HardwareInterface::process_capture_request(
3083                    const struct camera3_device *device,
3084                    camera3_capture_request_t *request)
3085{
3086    ALOGV("%s: E", __func__);
3087    QCamera3HardwareInterface *hw =
3088        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3089    if (!hw) {
3090        ALOGE("%s: NULL camera device", __func__);
3091        return -EINVAL;
3092    }
3093
3094    int rc = hw->processCaptureRequest(request);
3095    ALOGV("%s: X", __func__);
3096    return rc;
3097}
3098
3099/*===========================================================================
3100 * FUNCTION   : get_metadata_vendor_tag_ops
3101 *
3102 * DESCRIPTION: Provide the framework with this HAL's vendor tag query operations
3103 *
3104 * PARAMETERS :
3105 *   @device : camera3 device handle
3106 *   @ops    : vendor tag query ops structure to be filled in
3107 * RETURN     : None
3108 *==========================================================================*/
3109
3110void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3111                const struct camera3_device *device,
3112                vendor_tag_query_ops_t* ops)
3113{
3114    ALOGV("%s: E", __func__);
3115    QCamera3HardwareInterface *hw =
3116        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3117    if (!hw) {
3118        ALOGE("%s: NULL camera device", __func__);
3119        return;
3120    }
3121
3122    hw->getMetadataVendorTagOps(ops);
3123    ALOGV("%s: X", __func__);
3124    return;
3125}
3126
3127/*===========================================================================
3128 * FUNCTION   : dump
3129 *
3130 * DESCRIPTION: Dump HAL debug state to the given file descriptor
3131 *
3132 * PARAMETERS :
3133 *   @device : camera3 device handle
3134 *   @fd     : file descriptor to write the dump to
3135 * RETURN     : None
3136 *==========================================================================*/
3137
3138void QCamera3HardwareInterface::dump(
3139                const struct camera3_device *device, int fd)
3140{
3141    ALOGV("%s: E", __func__);
3142    QCamera3HardwareInterface *hw =
3143        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3144    if (!hw) {
3145        ALOGE("%s: NULL camera device", __func__);
3146        return;
3147    }
3148
3149    hw->dump(fd);
3150    ALOGV("%s: X", __func__);
3151    return;
3152}
3153
3154/*===========================================================================
3155 * FUNCTION   : close_camera_device
3156 *
3157 * DESCRIPTION: Close the camera device and release the HAL instance
3158 *
3159 * PARAMETERS :
3160 *   @device : hw_device_t handle of the camera to be closed
3161 *
3162 * RETURN     : Success: NO_ERROR
 *              Failure: BAD_VALUE (NULL device)
3163 *==========================================================================*/
3164int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3165{
3166    ALOGV("%s: E", __func__);
3167    int ret = NO_ERROR;
3168    QCamera3HardwareInterface *hw =
3169        reinterpret_cast<QCamera3HardwareInterface *>(
3170            reinterpret_cast<camera3_device_t *>(device)->priv);
3171    if (!hw) {
3172        ALOGE("%s: NULL camera device", __func__);
3173        return BAD_VALUE;
3174    }
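    // Deleting the HAL instance closes the camera session and releases its resources.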
3175    delete hw;
3176    ALOGV("%s: X", __func__);
3177    return ret;
3178}
3179
3180}; //end namespace qcamera
3181