QCamera3HWI.cpp revision 9443c6d3cd31da9b89221a4261debfeddafe3b00
1/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
49cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
50parm_buffer_t *prevSettings;
51const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
52
53pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
54    PTHREAD_MUTEX_INITIALIZER;
55unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
56
57const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
58    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
59    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
60    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
61    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
62    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
63    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
64    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
65    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
66    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
67};
68
69const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
70    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
71    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
72    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
73    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
74    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
75    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
76    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
77    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
78    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
79};
80
81const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
82    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
83    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
84    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
85    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
86    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
87    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
88    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
89    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
90    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
91    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
92    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
93    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
94    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
95    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
96    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
97};
98
99const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
100    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
101    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
102    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
103    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
104    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
105    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
106};
107
108const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
109    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
110    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
111    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
112    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
113};
114
115const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
116    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
117    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
118    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
119    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
120    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
121};
122
123const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
124    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
125    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON   },
126    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
127};
128
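/* Flattened (width, height) pairs of supported JPEG thumbnail sizes; the
 * trailing (0, 0) pair is the "no thumbnail" entry required by
 * ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES. */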
129const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
130                                             320, 240, 176, 144, 0, 0};
131
132camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
133    initialize:                         QCamera3HardwareInterface::initialize,
134    configure_streams:                  QCamera3HardwareInterface::configure_streams,
135    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
136    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
137    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
138    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
139    dump:                               QCamera3HardwareInterface::dump,
140};
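/* Note: this is the static camera3_device_ops_t dispatch table exported to the
 * camera framework. The static wrappers referenced here are expected to recover
 * the QCamera3HardwareInterface instance from camera3_device_t::priv (set in
 * the constructor below) and forward the call to the matching member method. */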
141
142
143/*===========================================================================
144 * FUNCTION   : QCamera3HardwareInterface
145 *
146 * DESCRIPTION: constructor of QCamera3HardwareInterface
147 *
148 * PARAMETERS :
149 *   @cameraId  : camera ID
150 *
151 * RETURN     : none
152 *==========================================================================*/
153QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
154    : mCameraId(cameraId),
155      mCameraHandle(NULL),
156      mCameraOpened(false),
157      mCameraInitialized(false),
158      mCallbackOps(NULL),
159      mInputStream(NULL),
160      mMetadataChannel(NULL),
161      mFirstRequest(false),
162      mParamHeap(NULL),
163      mParameters(NULL),
164      mJpegSettings(NULL)
165{
166    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
167    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
168    mCameraDevice.common.close = close_camera_device;
169    mCameraDevice.ops = &mCameraOps;
170    mCameraDevice.priv = this;
171    gCamCapability[cameraId]->version = CAM_HAL_V3;
172
173    pthread_mutex_init(&mRequestLock, NULL);
174    pthread_cond_init(&mRequestCond, NULL);
175    mPendingRequest = 0;
176    mCurrentRequestId = -1;
177
178    pthread_mutex_init(&mMutex, NULL);
179    pthread_mutex_init(&mCaptureResultLock, NULL);
180
181    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
182        mDefaultMetadata[i] = NULL;
183}
184
185/*===========================================================================
186 * FUNCTION   : ~QCamera3HardwareInterface
187 *
188 * DESCRIPTION: destructor of QCamera3HardwareInterface
189 *
190 * PARAMETERS : none
191 *
192 * RETURN     : none
193 *==========================================================================*/
194QCamera3HardwareInterface::~QCamera3HardwareInterface()
195{
196    ALOGV("%s: E", __func__);
197    /* We need to stop all streams before deleting any stream */
198    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
199        it != mStreamInfo.end(); it++) {
200        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
201        if (channel)
202            channel->stop();
203    }
204    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
205        it != mStreamInfo.end(); it++) {
206        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
207        if (channel)
208            delete channel;
209        free (*it);
210    }
211
212    if (mJpegSettings != NULL) {
213        free(mJpegSettings);
214        mJpegSettings = NULL;
215    }
216
217    /* Clean up all channels */
218    if (mCameraInitialized) {
219        mMetadataChannel->stop();
220        delete mMetadataChannel;
221        mMetadataChannel = NULL;
222        deinitParameters();
223    }
224
225    if (mCameraOpened)
226        closeCamera();
227
228    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
229        if (mDefaultMetadata[i])
230            free_camera_metadata(mDefaultMetadata[i]);
231
232    pthread_mutex_destroy(&mRequestLock);
233    pthread_cond_destroy(&mRequestCond);
234
235    pthread_mutex_destroy(&mMutex);
236    pthread_mutex_destroy(&mCaptureResultLock);
237    ALOGV("%s: X", __func__);
238}
239
240/*===========================================================================
241 * FUNCTION   : openCamera
242 *
243 * DESCRIPTION: open camera
244 *
245 * PARAMETERS :
246 *   @hw_device  : double ptr for camera device struct
247 *
248 * RETURN     : int32_t type of status
249 *              NO_ERROR  -- success
250 *              none-zero failure code
251 *==========================================================================*/
252int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
253{
254    int rc = 0;
255    pthread_mutex_lock(&mCameraSessionLock);
256    if (mCameraSessionActive) {
257        ALOGE("%s: multiple simultaneous camera instances are not supported", __func__);
258        pthread_mutex_unlock(&mCameraSessionLock);
259        return INVALID_OPERATION;
260    }
261
262    if (mCameraOpened) {
263        *hw_device = NULL;
        pthread_mutex_unlock(&mCameraSessionLock);
264        return PERMISSION_DENIED;
265    }
266
267    rc = openCamera();
268    if (rc == 0) {
269        *hw_device = &mCameraDevice.common;
270        mCameraSessionActive = 1;
271    } else
272        *hw_device = NULL;
273
274    pthread_mutex_unlock(&mCameraSessionLock);
275    return rc;
276}
277
278/*===========================================================================
279 * FUNCTION   : openCamera
280 *
281 * DESCRIPTION: open camera
282 *
283 * PARAMETERS : none
284 *
285 * RETURN     : int32_t type of status
286 *              NO_ERROR  -- success
287 *              none-zero failure code
288 *==========================================================================*/
289int QCamera3HardwareInterface::openCamera()
290{
291    if (mCameraHandle) {
292        ALOGE("Failure: Camera already opened");
293        return ALREADY_EXISTS;
294    }
295    mCameraHandle = camera_open(mCameraId);
296    if (!mCameraHandle) {
297        ALOGE("camera_open failed.");
298        return UNKNOWN_ERROR;
299    }
300
301    mCameraOpened = true;
302
303    return NO_ERROR;
304}
305
306/*===========================================================================
307 * FUNCTION   : closeCamera
308 *
309 * DESCRIPTION: close camera
310 *
311 * PARAMETERS : none
312 *
313 * RETURN     : int32_t type of status
314 *              NO_ERROR  -- success
315 *              none-zero failure code
316 *==========================================================================*/
317int QCamera3HardwareInterface::closeCamera()
318{
319    int rc = NO_ERROR;
320
321    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
322    mCameraHandle = NULL;
323    mCameraOpened = false;
324
325    return rc;
326}
327
328/*===========================================================================
329 * FUNCTION   : initialize
330 *
331 * DESCRIPTION: Initialize frameworks callback functions
332 *
333 * PARAMETERS :
334 *   @callback_ops : callback function to frameworks
335 *
336 * RETURN     :
337 *
338 *==========================================================================*/
339int QCamera3HardwareInterface::initialize(
340        const struct camera3_callback_ops *callback_ops)
341{
342    int rc;
343
344    pthread_mutex_lock(&mMutex);
345
346    rc = initParameters();
347    if (rc < 0) {
348        ALOGE("%s: initParameters failed %d", __func__, rc);
349        goto err1;
350    }
351    //Create metadata channel and initialize it
352    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
353                    mCameraHandle->ops, captureResultCb,
354                    &gCamCapability[mCameraId]->padding_info, this);
355    if (mMetadataChannel == NULL) {
356        ALOGE("%s: failed to allocate metadata channel", __func__);
357        rc = -ENOMEM;
358        goto err2;
359    }
360    rc = mMetadataChannel->initialize();
361    if (rc < 0) {
362        ALOGE("%s: metadata channel initialization failed", __func__);
363        goto err3;
364    }
365
366    mCallbackOps = callback_ops;
367
368    pthread_mutex_unlock(&mMutex);
369    mCameraInitialized = true;
370    return 0;
371
372err3:
373    delete mMetadataChannel;
374    mMetadataChannel = NULL;
375err2:
376    deinitParameters();
377err1:
378    pthread_mutex_unlock(&mMutex);
379    return rc;
380}
381
382/*===========================================================================
383 * FUNCTION   : configureStreams
384 *
385 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
386 *              and output streams.
387 *
388 * PARAMETERS :
389 *   @stream_list : streams to be configured
390 *
391 * RETURN     :
392 *
393 *==========================================================================*/
394int QCamera3HardwareInterface::configureStreams(
395        camera3_stream_configuration_t *streamList)
396{
397    int rc = 0;
398    pthread_mutex_lock(&mMutex);
399
400    // Sanity check stream_list
401    if (streamList == NULL) {
402        ALOGE("%s: NULL stream configuration", __func__);
403        pthread_mutex_unlock(&mMutex);
404        return BAD_VALUE;
405    }
406
407    if (streamList->streams == NULL) {
408        ALOGE("%s: NULL stream list", __func__);
409        pthread_mutex_unlock(&mMutex);
410        return BAD_VALUE;
411    }
412
413    if (streamList->num_streams < 1) {
414        ALOGE("%s: Bad number of streams requested: %d", __func__,
415                streamList->num_streams);
416        pthread_mutex_unlock(&mMutex);
417        return BAD_VALUE;
418    }
419
420    camera3_stream_t *inputStream = NULL;
421    /* first invalidate all the streams in mStreamInfo
422     * if they appear again, they will be validated */
423    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
424            it != mStreamInfo.end(); it++) {
425        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
426        channel->stop();
427        (*it)->status = INVALID;
428    }
429
430    for (size_t i = 0; i < streamList->num_streams; i++) {
431        camera3_stream_t *newStream = streamList->streams[i];
432        ALOGV("%s: newStream type = %d, stream format = %d",
433                __func__, newStream->stream_type, newStream->format);
434        //if the stream is already in mStreamInfo, validate it
435        bool stream_exists = false;
436        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
437                it != mStreamInfo.end(); it++) {
438            if ((*it)->stream == newStream) {
439                QCamera3Channel *channel =
440                    (QCamera3Channel*)(*it)->stream->priv;
441                stream_exists = true;
442                (*it)->status = RECONFIGURE;
443                /*delete the channel object associated with the stream because
444                  we need to reconfigure*/
445                delete channel;
446                (*it)->stream->priv = NULL;
447            }
448        }
449        if (!stream_exists) {
450            //new stream
451            stream_info_t* stream_info;
452            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
453            stream_info->stream = newStream;
454            stream_info->status = VALID;
455            stream_info->registered = 0;
456            mStreamInfo.push_back(stream_info);
457        }
458        if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
459            if (inputStream != NULL) {
460                ALOGE("%s: Multiple input streams requested!", __func__);
461                pthread_mutex_unlock(&mMutex);
462                return BAD_VALUE;
463            }
464            inputStream = newStream;
465        }
466    }
467    mInputStream = inputStream;
468
469    /*clean up invalid streams*/
470    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
471            it != mStreamInfo.end();) {
472        if(((*it)->status) == INVALID){
473            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
474            delete channel;
475            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
476            free(*it);
477            it = mStreamInfo.erase(it);
478        } else {
479            it++;
480        }
481    }
482
483    //mMetadataChannel->stop();
484
485    /* Allocate channel objects for the requested streams */
486    for (size_t i = 0; i < streamList->num_streams; i++) {
487        camera3_stream_t *newStream = streamList->streams[i];
488        if (newStream->priv == NULL) {
489            //New stream, construct channel
490            switch (newStream->stream_type) {
491            case CAMERA3_STREAM_INPUT:
492                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
493                break;
494            case CAMERA3_STREAM_BIDIRECTIONAL:
495                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
496                    GRALLOC_USAGE_HW_CAMERA_WRITE;
497                break;
498            case CAMERA3_STREAM_OUTPUT:
499                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
500                break;
501            default:
502                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
503                break;
504            }
505
506            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
507                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
508                QCamera3Channel *channel;
509                switch (newStream->format) {
510                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
511                case HAL_PIXEL_FORMAT_YCbCr_420_888:
512                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
513                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
514                            mCameraHandle->ops, captureResultCb,
515                            &gCamCapability[mCameraId]->padding_info, this, newStream);
516                    if (channel == NULL) {
517                        ALOGE("%s: allocation of channel failed", __func__);
518                        pthread_mutex_unlock(&mMutex);
519                        return -ENOMEM;
520                    }
521
522                    newStream->priv = channel;
523                    break;
524                case HAL_PIXEL_FORMAT_BLOB:
525                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
526                    channel = new QCamera3PicChannel(mCameraHandle->camera_handle,
527                            mCameraHandle->ops, captureResultCb,
528                            &gCamCapability[mCameraId]->padding_info, this, newStream);
529                    if (channel == NULL) {
530                        ALOGE("%s: allocation of channel failed", __func__);
531                        pthread_mutex_unlock(&mMutex);
532                        return -ENOMEM;
533                    }
534                    newStream->priv = channel;
535                    break;
536
537                //TODO: Add support for app consumed format?
538                default:
539                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
540                    break;
541                }
542            }
543        } else {
544            // Channel already exists for this stream
545            // Do nothing for now
546        }
547    }
548    /*For the streams to be reconfigured we need to register the buffers
549      since the framework won't*/
550    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
551            it != mStreamInfo.end(); it++) {
552        if ((*it)->status == RECONFIGURE) {
553            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
554            /*only register buffers for streams that have already been
555              registered*/
556            if ((*it)->registered) {
557                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
558                        (*it)->buffer_set.buffers);
559                if (rc != NO_ERROR) {
560                    ALOGE("%s: Failed to register the buffers of old stream,"
561                            " rc = %d", __func__, rc);
562                }
563                ALOGV("%s: channel %p has %d buffers",
564                        __func__, channel, (*it)->buffer_set.num_buffers);
565            }
566        }
567
568        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
569        if (index == NAME_NOT_FOUND) {
570            mPendingBuffersMap.add((*it)->stream, 0);
571        } else {
572            mPendingBuffersMap.editValueAt(index) = 0;
573        }
574    }
575
576    /* Initialize mPendingRequestsList and mPendingBuffersMap */
577    mPendingRequestsList.clear();
578
579    //settings/parameters don't carry over for new configureStreams
580    memset(mParameters, 0, sizeof(parm_buffer_t));
581    mFirstRequest = true;
582
583    pthread_mutex_unlock(&mMutex);
584    return rc;
585}
586
587/*===========================================================================
588 * FUNCTION   : validateCaptureRequest
589 *
590 * DESCRIPTION: validate a capture request from camera service
591 *
592 * PARAMETERS :
593 *   @request : request from framework to process
594 *
595 * RETURN     :
596 *
597 *==========================================================================*/
598int QCamera3HardwareInterface::validateCaptureRequest(
599                    camera3_capture_request_t *request)
600{
601    ssize_t idx = 0;
602    const camera3_stream_buffer_t *b;
603    CameraMetadata meta;
604
605    /* Sanity check the request */
606    if (request == NULL) {
607        ALOGE("%s: NULL capture request", __func__);
608        return BAD_VALUE;
609    }
610
611    uint32_t frameNumber = request->frame_number;
612    if (request->input_buffer != NULL &&
613            request->input_buffer->stream != mInputStream) {
614        ALOGE("%s: Request %d: Input buffer not from input stream!",
615                __FUNCTION__, frameNumber);
616        return BAD_VALUE;
617    }
618    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
619        ALOGE("%s: Request %d: No output buffers provided!",
620                __FUNCTION__, frameNumber);
621        return BAD_VALUE;
622    }
623    if (request->input_buffer != NULL) {
624        //TODO
625        ALOGE("%s: Not supporting input buffer yet", __func__);
626        return BAD_VALUE;
627    }
628
629    // Validate all buffers
630    b = request->output_buffers;
631    do {
632        QCamera3Channel *channel =
633                static_cast<QCamera3Channel*>(b->stream->priv);
634        if (channel == NULL) {
635            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
636                    __func__, frameNumber, idx);
637            return BAD_VALUE;
638        }
639        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
640            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
641                    __func__, frameNumber, idx);
642            return BAD_VALUE;
643        }
644        if (b->release_fence != -1) {
645            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
646                    __func__, frameNumber, idx);
647            return BAD_VALUE;
648        }
649        if (b->buffer == NULL) {
650            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
651                    __func__, frameNumber, idx);
652            return BAD_VALUE;
653        }
654        idx++;
655        b = request->output_buffers + idx;
656    } while (idx < (ssize_t)request->num_output_buffers);
657
658    return NO_ERROR;
659}
660
661/*===========================================================================
662 * FUNCTION   : registerStreamBuffers
663 *
664 * DESCRIPTION: Register buffers for a given stream with the HAL device.
665 *
666 * PARAMETERS :
667 *   @buffer_set : buffer set of a single stream to be registered with the HAL
668 *
669 * RETURN     :
670 *
671 *==========================================================================*/
672int QCamera3HardwareInterface::registerStreamBuffers(
673        const camera3_stream_buffer_set_t *buffer_set)
674{
675    int rc = 0;
676
677    pthread_mutex_lock(&mMutex);
678
679    if (buffer_set == NULL) {
680        ALOGE("%s: Invalid buffer_set parameter.", __func__);
681        pthread_mutex_unlock(&mMutex);
682        return -EINVAL;
683    }
684    if (buffer_set->stream == NULL) {
685        ALOGE("%s: Invalid stream parameter.", __func__);
686        pthread_mutex_unlock(&mMutex);
687        return -EINVAL;
688    }
689    if (buffer_set->num_buffers < 1) {
690        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
691        pthread_mutex_unlock(&mMutex);
692        return -EINVAL;
693    }
694    if (buffer_set->buffers == NULL) {
695        ALOGE("%s: Invalid buffers parameter.", __func__);
696        pthread_mutex_unlock(&mMutex);
697        return -EINVAL;
698    }
699
700    camera3_stream_t *stream = buffer_set->stream;
701    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
702
703    //set the buffer_set in the mStreamInfo array
704    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
705            it != mStreamInfo.end(); it++) {
706        if ((*it)->stream == stream) {
707            uint32_t numBuffers = buffer_set->num_buffers;
708            (*it)->buffer_set.stream = buffer_set->stream;
709            (*it)->buffer_set.num_buffers = numBuffers;
710            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
711            if ((*it)->buffer_set.buffers == NULL) {
712                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
713                pthread_mutex_unlock(&mMutex);
714                return -ENOMEM;
715            }
716            for (size_t j = 0; j < numBuffers; j++){
717                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
718            }
719            (*it)->registered = 1;
720        }
721    }
722
723    if (stream->stream_type != CAMERA3_STREAM_OUTPUT) {
724        ALOGE("%s: non-output stream types are not yet supported", __func__);
725        pthread_mutex_unlock(&mMutex);
726        return -EINVAL;
727    }
728    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
729    if (rc < 0) {
730        ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
731        pthread_mutex_unlock(&mMutex);
732        return -ENODEV;
733    }
734
735    pthread_mutex_unlock(&mMutex);
736    return NO_ERROR;
737}
738
739/*===========================================================================
740 * FUNCTION   : processCaptureRequest
741 *
742 * DESCRIPTION: process a capture request from camera service
743 *
744 * PARAMETERS :
745 *   @request : request from framework to process
746 *
747 * RETURN     :
748 *
749 *==========================================================================*/
750int QCamera3HardwareInterface::processCaptureRequest(
751                    camera3_capture_request_t *request)
752{
753    int rc = NO_ERROR;
754    int32_t request_id;
755    CameraMetadata meta;
756
757    pthread_mutex_lock(&mMutex);
758
759    rc = validateCaptureRequest(request);
760    if (rc != NO_ERROR) {
761        ALOGE("%s: incoming request is not valid", __func__);
762        pthread_mutex_unlock(&mMutex);
763        return rc;
764    }
765
766    uint32_t frameNumber = request->frame_number;
767
768    rc = setFrameParameters(request->frame_number, request->settings);
769    if (rc < 0) {
770        ALOGE("%s: fail to set frame parameters", __func__);
771        pthread_mutex_unlock(&mMutex);
772        return rc;
773    }
774
775    meta = request->settings;
776    if (meta.exists(ANDROID_REQUEST_ID)) {
777        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
778        mCurrentRequestId = request_id;
779        ALOGV("%s: Received request with id: %d",__func__, request_id);
780    } else if (mFirstRequest || mCurrentRequestId == -1){
781        ALOGE("%s: Unable to find request id field,"
782                " & no previous id available", __func__);
        pthread_mutex_unlock(&mMutex);
783        return NAME_NOT_FOUND;
784    } else {
785        ALOGV("%s: Re-using old request id", __func__);
786        request_id = mCurrentRequestId;
787    }
788
789
790    ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__,
791                                    request->num_output_buffers);
792    // Acquire all request buffers first
793    for (size_t i = 0; i < request->num_output_buffers; i++) {
794        const camera3_stream_buffer_t& output = request->output_buffers[i];
795        sp<Fence> acquireFence = new Fence(output.acquire_fence);
796
797        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
798            //Call function to store local copy of jpeg data for encode params.
799            rc = getJpegSettings(request->settings);
800            if (rc < 0) {
801                ALOGE("%s: failed to get jpeg parameters", __func__);
802                pthread_mutex_unlock(&mMutex);
803                return rc;
804            }
805        }
806
807        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
808        if (rc != OK) {
809            ALOGE("%s: fence wait failed %d", __func__, rc);
810            pthread_mutex_unlock(&mMutex);
811            return rc;
812        }
813    }
814
815    /* Update pending request list and pending buffers map */
816    pthread_mutex_lock(&mRequestLock);
817    PendingRequestInfo pendingRequest;
818    pendingRequest.frame_number = frameNumber;
819    pendingRequest.num_buffers = request->num_output_buffers;
820    pendingRequest.request_id = request_id;
821
822    for (size_t i = 0; i < request->num_output_buffers; i++) {
823        RequestedBufferInfo requestedBuf;
824        requestedBuf.stream = request->output_buffers[i].stream;
825        requestedBuf.buffer = NULL;
826        pendingRequest.buffers.push_back(requestedBuf);
827
828        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
829    }
830    mPendingRequestsList.push_back(pendingRequest);
831    pthread_mutex_unlock(&mRequestLock);
832
833    // Notify metadata channel we receive a request
834    mMetadataChannel->request(NULL, frameNumber);
835
836    // Call request on other streams
837    for (size_t i = 0; i < request->num_output_buffers; i++) {
838        const camera3_stream_buffer_t& output = request->output_buffers[i];
839        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
840
841        if (channel == NULL) {
842            ALOGE("%s: invalid channel pointer for stream", __func__);
843            continue;
844        }
845
846        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
847            rc = channel->request(output.buffer, frameNumber, mJpegSettings);
848        } else {
849            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
850                __LINE__, output.buffer, frameNumber);
851            rc = channel->request(output.buffer, frameNumber);
852        }
853        if (rc < 0)
854            ALOGE("%s: request failed", __func__);
855    }
856
857    mFirstRequest = false;
858
859    //Block on conditional variable
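    // Note: requests are handled synchronously. mPendingRequest is raised here
    // and this thread waits on mRequestCond until captureResultCb() lowers it,
    // which it does only once no stream has max_buffers buffers outstanding,
    // effectively throttling the framework to the pipeline depth.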
860    pthread_mutex_lock(&mRequestLock);
861    mPendingRequest = 1;
862    while (mPendingRequest == 1) {
863        pthread_cond_wait(&mRequestCond, &mRequestLock);
864    }
865    pthread_mutex_unlock(&mRequestLock);
866
867    pthread_mutex_unlock(&mMutex);
868    return rc;
869}
870
871/*===========================================================================
872 * FUNCTION   : getMetadataVendorTagOps
873 *
874 * DESCRIPTION:
875 *
876 * PARAMETERS :
877 *
878 *
879 * RETURN     :
880 *==========================================================================*/
881void QCamera3HardwareInterface::getMetadataVendorTagOps(
882                    vendor_tag_query_ops_t* /*ops*/)
883{
884    /* Enable locks when we eventually add Vendor Tags */
885    /*
886    pthread_mutex_lock(&mMutex);
887
888    pthread_mutex_unlock(&mMutex);
889    */
890    return;
891}
892
893/*===========================================================================
894 * FUNCTION   : dump
895 *
896 * DESCRIPTION:
897 *
898 * PARAMETERS :
899 *
900 *
901 * RETURN     :
902 *==========================================================================*/
903void QCamera3HardwareInterface::dump(int /*fd*/)
904{
905    /*Enable lock when we implement this function*/
906    /*
907    pthread_mutex_lock(&mMutex);
908
909    pthread_mutex_unlock(&mMutex);
910    */
911    return;
912}
913
914/*===========================================================================
915 * FUNCTION   : captureResultCb
916 *
917 * DESCRIPTION: Callback handler for all capture result
918 *              (streams, as well as metadata)
919 *
920 * PARAMETERS :
921 *   @metadata : metadata information
922 *   @buffer   : actual gralloc buffer to be returned to frameworks.
923 *               NULL if metadata.
924 *
925 * RETURN     : NONE
926 *==========================================================================*/
927void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
928                camera3_stream_buffer_t *buffer, uint32_t frame_number)
929{
930    pthread_mutex_lock(&mRequestLock);
931
932    if (metadata_buf) {
933        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
934        int32_t frame_number_valid = *(int32_t *)
935            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
936        uint32_t frame_number = *(uint32_t *)
937            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
938        const struct timeval *tv = (const struct timeval *)
939            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
940        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
941            tv->tv_usec * NSEC_PER_USEC;
942
943        if (!frame_number_valid) {
944            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
945            mMetadataChannel->bufDone(metadata_buf);
946            goto done_metadata;
947        }
948        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
949                frame_number, capture_time);
950
951        // Go through the pending requests info and send shutter/results to frameworks
952        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
953                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
954            camera3_capture_result_t result;
955            camera3_notify_msg_t notify_msg;
956            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
957
958            // Flush out all entries with less or equal frame numbers.
959
960            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
961            //Right now it's the same as metadata timestamp
962
963            //TODO: When there is metadata drop, how do we derive the timestamp of
964            //dropped frames? For now, we fake the dropped timestamp by subtracting
965            //from the reported timestamp
966            nsecs_t current_capture_time = capture_time -
967                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
968
969            // Send shutter notify to frameworks
970            notify_msg.type = CAMERA3_MSG_SHUTTER;
971            notify_msg.message.shutter.frame_number = i->frame_number;
972            notify_msg.message.shutter.timestamp = current_capture_time;
973            mCallbackOps->notify(mCallbackOps, &notify_msg);
974            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
975                    i->frame_number, capture_time);
976
977            // Send empty metadata with already filled buffers for dropped metadata
978            // and send valid metadata with already filled buffers for current metadata
979            if (i->frame_number < frame_number) {
980                CameraMetadata dummyMetadata;
981                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
982                        &current_capture_time, 1);
983                dummyMetadata.update(ANDROID_REQUEST_ID,
984                        &(i->request_id), 1);
985                result.result = dummyMetadata.release();
986            } else {
987                result.result = translateCbMetadataToResultMetadata(metadata,
988                        current_capture_time, i->request_id);
989                // Return metadata buffer
990                mMetadataChannel->bufDone(metadata_buf);
991            }
992            if (!result.result) {
993                ALOGE("%s: metadata is NULL", __func__);
994            }
995            result.frame_number = i->frame_number;
996            result.num_output_buffers = 0;
997            result.output_buffers = NULL;
998            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
999                    j != i->buffers.end(); j++) {
1000                if (j->buffer) {
1001                    result.num_output_buffers++;
1002                }
1003            }
1004
1005            if (result.num_output_buffers > 0) {
1006                camera3_stream_buffer_t *result_buffers =
1007                    new camera3_stream_buffer_t[result.num_output_buffers];
1008                if (!result_buffers) {
1009                    ALOGE("%s: Fatal error: out of memory", __func__);
1010                }
1011                size_t result_buffers_idx = 0;
1012                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1013                        j != i->buffers.end(); j++) {
1014                    if (j->buffer) {
1015                        result_buffers[result_buffers_idx++] = *(j->buffer);
1016                        free(j->buffer);
1017                        mPendingBuffersMap.editValueFor(j->stream)--;
1018                    }
1019                }
1020                result.output_buffers = result_buffers;
1021
1022                mCallbackOps->process_capture_result(mCallbackOps, &result);
1023                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1024                        __func__, result.frame_number, current_capture_time);
1025                free_camera_metadata((camera_metadata_t *)result.result);
1026                delete[] result_buffers;
1027            } else {
1028                mCallbackOps->process_capture_result(mCallbackOps, &result);
1029                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
1030                        __func__, result.frame_number, current_capture_time);
1031                free_camera_metadata((camera_metadata_t *)result.result);
1032            }
1033            // erase the element from the list
1034            i = mPendingRequestsList.erase(i);
1035        }
1036
1037
1038done_metadata:
1039        bool max_buffers_dequeued = false;
1040        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1041            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1042            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1043            if (queued_buffers == stream->max_buffers) {
1044                max_buffers_dequeued = true;
1045                break;
1046            }
1047        }
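        // Wake the thread blocked in processCaptureRequest() (waiting on
        // mRequestCond) only when every stream has fewer than max_buffers
        // buffers still pending in the HAL.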
1048        if (!max_buffers_dequeued) {
1049            // Unblock process_capture_request
1050            mPendingRequest = 0;
1051            pthread_cond_signal(&mRequestCond);
1052        }
1053    } else {
1054        // If the frame number doesn't exist in the pending request list,
1055        // directly send the buffer to the frameworks, and update pending buffers map
1056        // Otherwise, book-keep the buffer.
1057        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1058        while (i != mPendingRequestsList.end() && i->frame_number != frame_number)
1059            i++;
1060        if (i == mPendingRequestsList.end()) {
1061            // Verify all pending requests frame_numbers are greater
1062            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1063                    j != mPendingRequestsList.end(); j++) {
1064                if (j->frame_number < frame_number) {
1065                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1066                            __func__, j->frame_number, frame_number);
1067                }
1068            }
1069            camera3_capture_result_t result;
1070            result.result = NULL;
1071            result.frame_number = frame_number;
1072            result.num_output_buffers = 1;
1073            result.output_buffers = buffer;
1074            ALOGV("%s: result frame_number = %d, buffer = %p",
1075                    __func__, frame_number, buffer);
1076            mPendingBuffersMap.editValueFor(buffer->stream)--;
1077            mCallbackOps->process_capture_result(mCallbackOps, &result);
1078        } else {
1079            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1080                    j != i->buffers.end(); j++) {
1081                if (j->stream == buffer->stream) {
1082                    if (j->buffer != NULL) {
1083                        ALOGE("%s: Error: buffer is already set", __func__);
1084                    } else {
1085                        j->buffer = (camera3_stream_buffer_t *)malloc(
1086                                sizeof(camera3_stream_buffer_t));
1087                        *(j->buffer) = *buffer;
1088                        ALOGV("%s: cache buffer %p at result frame_number %d",
1089                                __func__, buffer, frame_number);
1090                    }
1091                }
1092            }
1093        }
1094    }
1095
1096    pthread_mutex_unlock(&mRequestLock);
1097    return;
1098}
1099
1100/*===========================================================================
1101 * FUNCTION   : translateCbMetadataToResultMetadata
1102 *
1103 * DESCRIPTION:
1104 *
1105 * PARAMETERS :
1106 *   @metadata : metadata information from callback
1107 *
1108 * RETURN     : camera_metadata_t*
1109 *              metadata in a format specified by fwk
1110 *==========================================================================*/
1111camera_metadata_t*
1112QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1113                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1114                                 int32_t request_id)
1115{
1116    CameraMetadata camMetadata;
1117    camera_metadata_t* resultMetadata;
1118
1119    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1120    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1121
1122    /*CAM_INTF_META_HISTOGRAM - TODO*/
1123    /*cam_hist_stats_t  *histogram =
1124      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1125      metadata);*/
1126
1127    /*face detection*/
1128    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1129        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1130    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1131    int32_t faceIds[numFaces];
1132    uint8_t faceScores[numFaces];
1133    int32_t faceRectangles[numFaces * 4];
1134    int32_t faceLandmarks[numFaces * 6];
1135    int j = 0, k = 0;
1136    for (int i = 0; i < numFaces; i++) {
1137        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1138        faceScores[i] = faceDetectionInfo->faces[i].score;
1139        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1140                faceRectangles+j, -1);
1141        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1142        j+= 4;
1143        k+= 6;
1144    }
1145    if (numFaces > 0) {
1146        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1147        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1148        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1149            faceRectangles, numFaces*4);
1150        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1151            faceLandmarks, numFaces*6);
1152    }
1153
1154    uint8_t  *color_correct_mode =
1155        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1156    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1157
1158    int32_t  *ae_precapture_id =
1159        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1160    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1161
1162    /*aec regions*/
1163    cam_area_t  *hAeRegions =
1164        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1165    int32_t aeRegions[5];
1166    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1167    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1168
1169    uint8_t  *ae_state =
1170        (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1171    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1172
1173    uint8_t  *focusMode =
1174        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1175    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1176
1177    /*af regions*/
1178    cam_area_t  *hAfRegions =
1179        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1180    int32_t afRegions[5];
1181    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1182    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1183
1184    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1185    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1186
1187    int32_t  *afTriggerId =
1188        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1189    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1190
1191    uint8_t  *whiteBalance =
1192        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1193    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1194
1195    /*awb regions*/
1196    cam_area_t  *hAwbRegions =
1197        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1198    int32_t awbRegions[5];
1199    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1200    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1201
1202    uint8_t  *whiteBalanceState =
1203        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1204    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1205
1206    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1207    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1208
1209    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
1210    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1211
1212    uint8_t  *flashPower =
1213        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1214    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1215
1216    int64_t  *flashFiringTime =
1217        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1218    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1219
1220    /*int32_t  *ledMode =
1221      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1222      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1223
1224    uint8_t  *flashState =
1225        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1226    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1227
1228    uint8_t  *hotPixelMode =
1229        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1230    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1231
1232    float  *lensAperture =
1233        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1234    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1235
1236    float  *filterDensity =
1237        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1238    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1239
1240    float  *focalLength =
1241        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1242    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1243
1244    float  *focusDistance =
1245        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1246    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1247
1248    float  *focusRange =
1249        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1250    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1251
1252    uint8_t  *opticalStab =
1253        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1254    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1255
1256    /*int32_t  *focusState =
1257      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1258      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1259
1260    uint8_t  *noiseRedMode =
1261        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1262    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1263
1264    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1265
1266    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1267        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
1268    int32_t scalerCropRegion[4];
1269    scalerCropRegion[0] = hScalerCropRegion->left;
1270    scalerCropRegion[1] = hScalerCropRegion->top;
1271    scalerCropRegion[2] = hScalerCropRegion->width;
1272    scalerCropRegion[3] = hScalerCropRegion->height;
1273    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1274
1275    int64_t  *sensorExpTime =
1276        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1277    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1278
1279    int64_t  *sensorFrameDuration =
1280        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1281    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
1282
1283    int32_t  *sensorSensitivity =
1284        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1285    mMetadataResponse.iso_speed = *sensorSensitivity;
1286    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1287
1288    uint8_t  *shadingMode =
1289        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1290    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1291
1292    uint8_t  *faceDetectMode =
1293        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1294    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
1295
1296    uint8_t  *histogramMode =
1297        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1298    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1299
1300    uint8_t  *sharpnessMapMode =
1301        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1302    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1303            sharpnessMapMode, 1);
1304
1305    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1306    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1307        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1308    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1309            (int32_t*)sharpnessMap->sharpness,
1310            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1311
1312    resultMetadata = camMetadata.release();
1313    return resultMetadata;
1314}
1315
1316/*===========================================================================
1317 * FUNCTION   : convertToRegions
1318 *
1319 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1320 *
1321 * PARAMETERS :
1322 *   @rect   : cam_rect_t struct to convert
1323 *   @region : int32_t destination array
1324 *   @weight : if we are converting from cam_area_t, weight is valid
1325 *             else weight = -1
1326 *
1327 *==========================================================================*/
1328void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1329    region[0] = rect.left;
1330    region[1] = rect.top;
1331    region[2] = rect.left + rect.width;
1332    region[3] = rect.top + rect.height;
1333    if (weight > -1) {
1334        region[4] = weight;
1335    }
1336}
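
/* Illustrative example (hypothetical values): a cam_rect_t of
 * {left=100, top=200, width=300, height=400} with weight 1 becomes the
 * framework region array {100, 200, 400, 600, 1}, i.e.
 * [x_min, y_min, x_max, y_max, weight]. */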
1337
1338/*===========================================================================
1339 * FUNCTION   : convertFromRegions
1340 *
1341 * DESCRIPTION: helper method to convert from a metadata region array to cam_area_t
1342 *
1343 * PARAMETERS :
1344 *   @roi      : cam_area_t destination struct
1345 *   @settings : frame settings holding the region array
1346 *   @tag      : metadata tag of the region array
1347 *               ([x_min, y_min, x_max, y_max, weight])
1348 *
1349 *==========================================================================*/
1350void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1351                                                   const camera_metadata_t *settings,
1352                                                   uint32_t tag){
1353    CameraMetadata frame_settings;
1354    frame_settings = settings;
1355    int32_t x_min = frame_settings.find(tag).data.i32[0];
1356    int32_t y_min = frame_settings.find(tag).data.i32[1];
1357    int32_t x_max = frame_settings.find(tag).data.i32[2];
1358    int32_t y_max = frame_settings.find(tag).data.i32[3];
1359    roi->weight = frame_settings.find(tag).data.i32[4];
1360    roi->rect.left = x_min;
1361    roi->rect.top = y_min;
1362    roi->rect.width = x_max - x_min;
1363    roi->rect.height = y_max - y_min;
1364}
1365
1366/*===========================================================================
1367 * FUNCTION   : resetIfNeededROI
1368 *
1369 * DESCRIPTION: helper method to clamp the roi to the scaler crop region;
1370 *              returns false if the roi does not overlap the crop region
1371 *
1372 * PARAMETERS :
1373 *   @roi       : cam_area_t struct to resize
1374 *   @scalerCropRegion : cam_crop_region_t region to compare against
1375 *
1376 *
1377 *==========================================================================*/
1378bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1379                                                 const cam_crop_region_t* scalerCropRegion)
1380{
1381    int32_t roi_x_max = roi->rect.width + roi->rect.left;
1382    int32_t roi_y_max = roi->rect.height + roi->rect.top;
1383    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
1384    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
1385    if ((roi_x_max < scalerCropRegion->left) ||
1386        (roi_y_max < scalerCropRegion->top)  ||
1387        (roi->rect.left > crop_x_max) ||
1388        (roi->rect.top > crop_y_max)){
1389        return false;
1390    }
1391    if (roi->rect.left < scalerCropRegion->left) {
1392        roi->rect.left = scalerCropRegion->left;
1393    }
1394    if (roi->rect.top < scalerCropRegion->top) {
1395        roi->rect.top = scalerCropRegion->top;
1396    }
1397    if (roi_x_max > crop_x_max) {
1398        roi_x_max = crop_x_max;
1399    }
1400    if (roi_y_max > crop_y_max) {
1401        roi_y_max = crop_y_max;
1402    }
1403    roi->rect.width = roi_x_max - roi->rect.left;
1404    roi->rect.height = roi_y_max - roi->rect.top;
1405    return true;
1406}
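/* Worked example (illustrative, hypothetical values): an roi of
 * {left=0, top=0, width=4000, height=3000} clamped against a crop region of
 * {left=500, top=375, width=3000, height=2250} becomes
 * {left=500, top=375, width=3000, height=2250} and the method returns true.
 * An roi that does not overlap the crop region at all makes the method
 * return false and is left untouched. */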
1407
1408/*===========================================================================
1409 * FUNCTION   : convertLandmarks
1410 *
1411 * DESCRIPTION: helper method to extract the landmarks from face detection info
1412 *
1413 * PARAMETERS :
1414 *   @face   : cam_face_detection_info_t struct with the detected face
1415 *   @landmarks : int32_t destination array
1416 *
1417 *
1418 *==========================================================================*/
1419void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1420{
1421    landmarks[0] = face.left_eye_center.x;
1422    landmarks[1] = face.left_eye_center.y;
1423        landmarks[2] = face.right_eye_center.x;
1424    landmarks[3] = face.right_eye_center.y;
1425    landmarks[4] = face.mouth_center.x;
1426    landmarks[5] = face.mouth_center.y;
1427}
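/* The resulting layout matches ANDROID_STATISTICS_FACE_LANDMARKS:
 * [left_eye_x, left_eye_y, right_eye_x, right_eye_y, mouth_x, mouth_y]. */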
1428
1429#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1430/*===========================================================================
1431 * FUNCTION   : initCapabilities
1432 *
1433 * DESCRIPTION: initialize camera capabilities in static data struct
1434 *
1435 * PARAMETERS :
1436 *   @cameraId  : camera Id
1437 *
1438 * RETURN     : int32_t type of status
1439 *              NO_ERROR  -- success
1440 *              non-zero failure code
1441 *==========================================================================*/
1442int QCamera3HardwareInterface::initCapabilities(int cameraId)
1443{
1444    int rc = 0;
1445    mm_camera_vtbl_t *cameraHandle = NULL;
1446    QCamera3HeapMemory *capabilityHeap = NULL;
1447
1448    cameraHandle = camera_open(cameraId);
1449    if (!cameraHandle) {
1450        ALOGE("%s: camera_open failed", __func__);
1451        rc = -1;
1452        goto open_failed;
1453    }
1454
1455    capabilityHeap = new QCamera3HeapMemory();
1456    if (capabilityHeap == NULL) {
1457        ALOGE("%s: creation of capabilityHeap failed", __func__);
1458        goto heap_creation_failed;
1459    }
1460    /* Allocate memory for capability buffer */
1461    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1462    if(rc != OK) {
1463        ALOGE("%s: No memory for cappability", __func__);
1464        goto allocate_failed;
1465    }
1466
1467    /* Map memory for capability buffer */
1468    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1469    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1470                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1471                                capabilityHeap->getFd(0),
1472                                sizeof(cam_capability_t));
1473    if(rc < 0) {
1474        ALOGE("%s: failed to map capability buffer", __func__);
1475        goto map_failed;
1476    }
1477
1478    /* Query Capability */
1479    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1480    if(rc < 0) {
1481        ALOGE("%s: failed to query capability",__func__);
1482        goto query_failed;
1483    }
1484    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1485    if (!gCamCapability[cameraId]) {
1486        ALOGE("%s: out of memory", __func__);
1487        rc = NO_MEMORY; goto query_failed;
1488    }
1489    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1490                                        sizeof(cam_capability_t));
1491    rc = 0;
1492
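    /* Error-handling labels below undo the setup steps in reverse order of
     * acquisition (unmap buffer, free heap buffer, delete heap object, close
     * camera), so each resource that was successfully acquired is released
     * exactly once regardless of where the function bailed out. */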
1493query_failed:
1494    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1495                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1496map_failed:
1497    capabilityHeap->deallocate();
1498allocate_failed:
1499    delete capabilityHeap;
1500heap_creation_failed:
1501    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1502    cameraHandle = NULL;
1503open_failed:
1504    return rc;
1505}
1506
1507/*===========================================================================
1508 * FUNCTION   : initParameters
1509 *
1510 * DESCRIPTION: initialize camera parameters
1511 *
1512 * PARAMETERS :
1513 *
1514 * RETURN     : int32_t type of status
1515 *              NO_ERROR  -- success
1516 *              non-zero failure code
1517 *==========================================================================*/
1518int QCamera3HardwareInterface::initParameters()
1519{
1520    int rc = 0;
1521
1522    //Allocate Set Param Buffer
1523    mParamHeap = new QCamera3HeapMemory();
1524    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1525    if(rc != OK) {
1526        rc = NO_MEMORY;
1527        ALOGE("Failed to allocate SETPARM Heap memory");
1528        delete mParamHeap;
1529        mParamHeap = NULL;
1530        return rc;
1531    }
1532
1533    //Map memory for parameters buffer
1534    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1535            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1536            mParamHeap->getFd(0),
1537            sizeof(parm_buffer_t));
1538    if(rc < 0) {
1539        ALOGE("%s:failed to map SETPARM buffer",__func__);
1540        rc = FAILED_TRANSACTION;
1541        mParamHeap->deallocate();
1542        delete mParamHeap;
1543        mParamHeap = NULL;
1544        return rc;
1545    }
1546
1547    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1548    return rc;
1549}
1550
1551/*===========================================================================
1552 * FUNCTION   : deinitParameters
1553 *
1554 * DESCRIPTION: de-initialize camera parameters
1555 *
1556 * PARAMETERS :
1557 *
1558 * RETURN     : NONE
1559 *==========================================================================*/
1560void QCamera3HardwareInterface::deinitParameters()
1561{
1562    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1563            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1564
1565    mParamHeap->deallocate();
1566    delete mParamHeap;
1567    mParamHeap = NULL;
1568
1569    mParameters = NULL;
1570}
1571
1572/*===========================================================================
1573 * FUNCTION   : calcMaxJpegSize
1574 *
1575 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1576 *
1577 * PARAMETERS :
1578 *
1579 * RETURN     : max_jpeg_size
1580 *==========================================================================*/
1581int QCamera3HardwareInterface::calcMaxJpegSize()
1582{
1583    int32_t max_jpeg_size = 0;
1584    int temp_width, temp_height;
1585    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1586        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1587        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1588        if (temp_width * temp_height > max_jpeg_size ) {
1589            max_jpeg_size = temp_width * temp_height;
1590        }
1591    }
1592    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1593    return max_jpeg_size;
1594}
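/* Worked example (illustrative, hypothetical sensor): if the largest picture
 * size is 4208x3120, then max_jpeg_size = 4208 * 3120 * 3/2 = 19,693,440 bytes
 * plus sizeof(camera3_jpeg_blob_t) for the trailing blob header, i.e. a
 * conservative worst-case buffer size for the JPEG (blob) stream. */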
1595
1596/*===========================================================================
1597 * FUNCTION   : initStaticMetadata
1598 *
1599 * DESCRIPTION: initialize the static metadata
1600 *
1601 * PARAMETERS :
1602 *   @cameraId  : camera Id
1603 *
1604 * RETURN     : int32_t type of status
1605 *              0  -- success
1606 *              non-zero failure code
1607 *==========================================================================*/
1608int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1609{
1610    int rc = 0;
1611    CameraMetadata staticInfo;
1612    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1613    /*HAL 3 only*/
1614    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1615                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1616
1617    /*hard coded for now but this should come from sensor*/
1618    float min_focus_distance;
1619    if(facingBack){
1620        min_focus_distance = 10;
1621    } else {
1622        min_focus_distance = 0;
1623    }
1624    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1625                    &min_focus_distance, 1);
1626
1627    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1628                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1629
1630    /*should be using focal lengths but sensor doesn't provide that info now*/
1631    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1632                      &gCamCapability[cameraId]->focal_length,
1633                      1);
1634
1635    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1636                      gCamCapability[cameraId]->apertures,
1637                      gCamCapability[cameraId]->apertures_count);
1638
1639    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1640                gCamCapability[cameraId]->filter_densities,
1641                gCamCapability[cameraId]->filter_densities_count);
1642
1643
1644    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1645                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1646                      gCamCapability[cameraId]->optical_stab_modes_count);
1647
1648    staticInfo.update(ANDROID_LENS_POSITION,
1649                      gCamCapability[cameraId]->lens_position,
1650                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1651
1652    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1653                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1654    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1655                      lens_shading_map_size,
1656                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1657
1658    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map,
1659            sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float));
1660
1661    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1662                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1663    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1664            geo_correction_map_size,
1665            sizeof(geo_correction_map_size)/sizeof(int32_t));
1666
1667    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1668                       gCamCapability[cameraId]->geo_correction_map,
1669                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1670
1671    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1672            gCamCapability[cameraId]->sensor_physical_size, 2);
1673
1674    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1675            gCamCapability[cameraId]->exposure_time_range, 2);
1676
1677    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1678            &gCamCapability[cameraId]->max_frame_duration, 1);
1679
1680
1681    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1682                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1683
1684    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1685                                               gCamCapability[cameraId]->pixel_array_size.height};
1686    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1687                      pixel_array_size, 2);
1688
1689    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.width,
1690                                                gCamCapability[cameraId]->active_array_size.height};
1691
1692    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1693                      active_array_size, 2);
1694
1695    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1696            &gCamCapability[cameraId]->white_level, 1);
1697
1698    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1699            gCamCapability[cameraId]->black_level_pattern, 4);
1700
1701    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1702                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1703
1704    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1705                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1706
1707    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1708                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1709    /*hardcode 0 for now*/
1710    int32_t max_face_count = 0;
1711    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1712                      &max_face_count, 1);
1713
1714    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1715                      &gCamCapability[cameraId]->histogram_size, 1);
1716
1717    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1718            &gCamCapability[cameraId]->max_histogram_count, 1);
1719
1720    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1721                                                gCamCapability[cameraId]->sharpness_map_size.height};
1722
1723    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1724            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1725
1726    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1727            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1728
1729
1730    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1731                      &gCamCapability[cameraId]->raw_min_duration,
1732                       1);
1733
1734    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888};
1735    int scalar_formats_count = 1;
1736    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1737                      scalar_formats,
1738                      scalar_formats_count);
1739
1740    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
1741    makeTable(gCamCapability[cameraId]->supported_sizes_tbl,
1742              gCamCapability[cameraId]->supported_sizes_tbl_cnt,
1743              available_processed_sizes);
1744    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1745                available_processed_sizes,
1746                (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2);
1747
1748    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1749    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1750                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1751                 available_fps_ranges);
1752    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1753            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1754
1755    camera_metadata_rational exposureCompensationStep = {
1756            gCamCapability[cameraId]->exp_compensation_step.numerator,
1757            gCamCapability[cameraId]->exp_compensation_step.denominator};
1758    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1759                      &exposureCompensationStep, 1);
1760
1761    /*TO DO*/
1762    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1763    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1764                      availableVstabModes, sizeof(availableVstabModes));
1765
1766    /*HAL 1 and HAL 3 common*/
1767    float maxZoom = 4;
1768    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1769            &maxZoom, 1);
1770
1771    int32_t max3aRegions = 1;
1772    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1773            &max3aRegions, 1);
1774
1775    uint8_t availableFaceDetectModes[] = {
1776            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1777    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1778                      availableFaceDetectModes,
1779                      sizeof(availableFaceDetectModes));
1780
1781    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1782                                       gCamCapability[cameraId]->raw_dim.height};
1783    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1784                      raw_size,
1785                      sizeof(raw_size)/sizeof(int32_t));
1786
1787    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1788                                                        gCamCapability[cameraId]->exposure_compensation_max};
1789    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1790            exposureCompensationRange,
1791            sizeof(exposureCompensationRange)/sizeof(int32_t));
1792
1793    uint8_t lensFacing = (facingBack) ?
1794            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1795    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1796
1797    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
1798    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1799              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1800              available_jpeg_sizes);
1801    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1802                available_jpeg_sizes,
1803                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1804
1805    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1806                      available_thumbnail_sizes,
1807                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1808
1809    int32_t max_jpeg_size = 0;
1810    int temp_width, temp_height;
1811    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1812        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1813        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1814        if (temp_width * temp_height > max_jpeg_size ) {
1815            max_jpeg_size = temp_width * temp_height;
1816        }
1817    }
1818    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1819    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1820                      &max_jpeg_size, 1);
1821
1822    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1823    int32_t size = 0;
1824    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1825        int val = lookupFwkName(EFFECT_MODES_MAP,
1826                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1827                                   gCamCapability[cameraId]->supported_effects[i]);
1828        if (val != NAME_NOT_FOUND) {
1829            avail_effects[size] = (uint8_t)val;
1830            size++;
1831        }
1832    }
1833    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1834                      avail_effects,
1835                      size);
1836
1837    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1838    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1839    int32_t supported_scene_modes_cnt = 0;
1840    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1841        int val = lookupFwkName(SCENE_MODES_MAP,
1842                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1843                                gCamCapability[cameraId]->supported_scene_modes[i]);
1844        if (val != NAME_NOT_FOUND) {
1845            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1846            supported_indexes[supported_scene_modes_cnt] = i;
1847            supported_scene_modes_cnt++;
1848        }
1849    }
1850
1851    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1852                      avail_scene_modes,
1853                      supported_scene_modes_cnt);
1854
1855    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1856    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1857                      supported_scene_modes_cnt,
1858                      scene_mode_overrides,
1859                      supported_indexes,
1860                      cameraId);
1861    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
1862                      scene_mode_overrides,
1863                      supported_scene_modes_cnt*3);
1864
1865    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
1866    size = 0;
1867    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
1868        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
1869                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
1870                                 gCamCapability[cameraId]->supported_antibandings[i]);
1871        if (val != NAME_NOT_FOUND) {
1872            avail_antibanding_modes[size] = (uint8_t)val;
1873            size++;
1874        }
1875
1876    }
1877    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1878                      avail_antibanding_modes,
1879                      size);
1880
1881    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
1882    size = 0;
1883    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
1884        int val = lookupFwkName(FOCUS_MODES_MAP,
1885                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
1886                                gCamCapability[cameraId]->supported_focus_modes[i]);
1887        if (val != NAME_NOT_FOUND) {
1888            avail_af_modes[size] = (uint8_t)val;
1889            size++;
1890        }
1891    }
1892    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1893                      avail_af_modes,
1894                      size);
1895
1896    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
1897    size = 0;
1898    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
1899        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1900                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1901                                    gCamCapability[cameraId]->supported_white_balances[i]);
1902        if (val != NAME_NOT_FOUND) {
1903            avail_awb_modes[size] = (uint8_t)val;
1904            size++;
1905        }
1906    }
1907    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1908                      avail_awb_modes,
1909                      size);
1910
1911    uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX];
1912    size = 0;
1913    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) {
1914        int val = lookupFwkName(FLASH_MODES_MAP,
1915                                sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
1916                                gCamCapability[cameraId]->supported_flash_modes[i]);
1917        if (val != NAME_NOT_FOUND) {
1918            avail_flash_modes[size] = (uint8_t)val;
1919            size++;
1920        }
1921    }
1922    uint8_t flashAvailable = 0;
1923    if (size > 1) {
1924        //flash is supported
1925        flashAvailable = 1;
1926    }
1927    staticInfo.update(ANDROID_FLASH_MODE,
1928                      avail_flash_modes,
1929                      size);
1930
1931    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
1932            &flashAvailable, 1);
1933
1934    uint8_t avail_ae_modes[5];
1935    size = 0;
1936    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
1937        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
1938        size++;
1939    }
1940    if (flashAvailable) {
1941        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
1942        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
1943        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
1944    }
1945    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1946                      avail_ae_modes,
1947                      size);
1948    size = 0;
1949    int32_t avail_sensitivities[CAM_ISO_MODE_MAX];
1950    for (int i = 0; i < gCamCapability[cameraId]->supported_iso_modes_cnt; i++) {
1951        int32_t sensitivity = getSensorSensitivity(gCamCapability[cameraId]->supported_iso_modes[i]);
1952        if (sensitivity != -1) {
1953            avail_sensitivities[size] = sensitivity;
1954            size++;
1955        }
1956    }
1957    staticInfo.update(ANDROID_SENSOR_INFO_AVAILABLE_SENSITIVITIES,
1958                      avail_sensitivities,
1959                      size);
1960
1961    gStaticMetadata[cameraId] = staticInfo.release();
1962    return rc;
1963}
1964
1965/*===========================================================================
1966 * FUNCTION   : makeTable
1967 *
1968 * DESCRIPTION: make a table of sizes
1969 *
1970 * PARAMETERS :
1971 *
1972 *
1973 *==========================================================================*/
1974void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
1975                                          int32_t* sizeTable)
1976{
1977    int j = 0;
1978    for (int i = 0; i < size; i++) {
1979        sizeTable[j] = dimTable[i].width;
1980        sizeTable[j+1] = dimTable[i].height;
1981        j+=2;
1982    }
1983}
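/* Example (illustrative): a dimTable of {{640, 480}, {1280, 720}} is
 * flattened into sizeTable = {640, 480, 1280, 720}, the interleaved
 * width/height layout used by the ANDROID_SCALER_AVAILABLE_* size tags. */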
1984
1985/*===========================================================================
1986 * FUNCTION   : makeFPSTable
1987 *
1988 * DESCRIPTION: make a table of fps ranges
1989 *
1990 * PARAMETERS :
1991 *
1992 *==========================================================================*/
1993void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
1994                                          int32_t* fpsRangesTable)
1995{
1996    int j = 0;
1997    for (int i = 0; i < size; i++) {
1998        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
1999        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2000        j+=2;
2001    }
2002}
2003
2004/*===========================================================================
2005 * FUNCTION   : makeOverridesList
2006 *
2007 * DESCRIPTION: make a list of scene mode overrides
2008 *
2009 * PARAMETERS :
2010 *
2011 *
2012 *==========================================================================*/
2013void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2014                                                  uint8_t size, uint8_t* overridesList,
2015                                                  uint8_t* supported_indexes,
2016                                                  int camera_id)
2017{
2018    /*daemon will give a list of overrides for all scene modes.
2019      However we should send the fwk only the overrides for the scene modes
2020      supported by the framework*/
2021    int j = 0, index = 0, supt = 0;
2022    uint8_t focus_override;
2023    for (int i = 0; i < size; i++) {
2024        supt = 0;
2025        index = supported_indexes[i];
2026        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
2027        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2028                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2029                                                    overridesTable[index].awb_mode);
2030        focus_override = (uint8_t)overridesTable[index].af_mode;
2031        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2032           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2033              supt = 1;
2034              break;
2035           }
2036        }
2037        if (supt) {
2038           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2039                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2040                                              focus_override);
2041        } else {
2042           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2043        }
2044        j+=3;
2045    }
2046}
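/* The resulting list is one [ae_mode, awb_mode, af_mode] triplet per scene
 * mode reported in ANDROID_CONTROL_AVAILABLE_SCENE_MODES, so for example
 * (illustrative) two supported scene modes produce a 6-entry overrides list. */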
2047
2048/*===========================================================================
2049 * FUNCTION   : getScalarFormat
2050 *
2051 * DESCRIPTION: convert the format to type recognized by framework
2052 *
2053 * PARAMETERS : format : the format from backend
2054 *
2055 * RETURN     : format recognized by framework
2056 *
2057 *==========================================================================*/
2058int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2059{
2060    int32_t halPixelFormat;
2061
2062    switch (format) {
2063    case CAM_FORMAT_YUV_420_NV12:
2064        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2065        break;
2066    case CAM_FORMAT_YUV_420_NV21:
2067        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2068        break;
2069    case CAM_FORMAT_YUV_420_NV21_ADRENO:
2070        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2071        break;
2072    case CAM_FORMAT_YUV_420_YV12:
2073        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2074        break;
2075    case CAM_FORMAT_YUV_422_NV16:
2076    case CAM_FORMAT_YUV_422_NV61:
2077    default:
2078        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2079        break;
2080    }
2081    return halPixelFormat;
2082}
2083
2084/*===========================================================================
2085 * FUNCTION   : getSensorSensitivity
2086 *
2087 * DESCRIPTION: convert iso_mode to an integer value
2088 *
2089 * PARAMETERS : iso_mode : the iso_mode supported by sensor
2090 *
2091 * RETURN     : sensitivity supported by sensor
2092 *
2093 *==========================================================================*/
2094int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2095{
2096    int32_t sensitivity;
2097
2098    switch (iso_mode) {
2099    case CAM_ISO_MODE_100:
2100        sensitivity = 100;
2101        break;
2102    case CAM_ISO_MODE_200:
2103        sensitivity = 200;
2104        break;
2105    case CAM_ISO_MODE_400:
2106        sensitivity = 400;
2107        break;
2108    case CAM_ISO_MODE_800:
2109        sensitivity = 800;
2110        break;
2111    case CAM_ISO_MODE_1600:
2112        sensitivity = 1600;
2113        break;
2114    default:
2115        sensitivity = -1;
2116        break;
2117    }
2118    return sensitivity;
2119}
2120
2121
2122/*===========================================================================
2123 * FUNCTION   : AddSetParmEntryToBatch
2124 *
2125 * DESCRIPTION: add set parameter entry into batch
2126 *
2127 * PARAMETERS :
2128 *   @p_table     : ptr to parameter buffer
2129 *   @paramType   : parameter type
2130 *   @paramLength : length of parameter value
2131 *   @paramValue  : ptr to parameter value
2132 *
2133 * RETURN     : int32_t type of status
2134 *              NO_ERROR  -- success
2135 *              non-zero failure code
2136 *==========================================================================*/
2137int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2138                                                          cam_intf_parm_type_t paramType,
2139                                                          uint32_t paramLength,
2140                                                          void *paramValue)
2141{
2142    int position = paramType;
2143    int current, next;
2144
2145    /*************************************************************************
2146    *                 Code to take care of linking next flags                *
2147    *************************************************************************/
2148    current = GET_FIRST_PARAM_ID(p_table);
2149    if (position == current){
2150        //DO NOTHING
2151    } else if (position < current){
2152        SET_NEXT_PARAM_ID(position, p_table, current);
2153        SET_FIRST_PARAM_ID(p_table, position);
2154    } else {
2155        /* Search for the position in the linked list where we need to slot in*/
2156        while (position > GET_NEXT_PARAM_ID(current, p_table))
2157            current = GET_NEXT_PARAM_ID(current, p_table);
2158
2159        /*If node already exists no need to alter linking*/
2160        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2161            next = GET_NEXT_PARAM_ID(current, p_table);
2162            SET_NEXT_PARAM_ID(current, p_table, position);
2163            SET_NEXT_PARAM_ID(position, p_table, next);
2164        }
2165    }
2166
2167    /*************************************************************************
2168    *                   Copy contents into entry                             *
2169    *************************************************************************/
2170
2171    if (paramLength > sizeof(parm_type_t)) {
2172        ALOGE("%s:Size of input larger than max entry size",__func__);
2173        return BAD_VALUE;
2174    }
2175    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2176    return NO_ERROR;
2177}
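/* Worked example (illustrative, hypothetical parameter IDs): starting from an
 * empty batch whose first flagged entry is CAM_INTF_PARM_MAX, adding IDs 7, 3
 * and 5 in that order leaves 3 as the first flagged entry with next links
 * 3 -> 5 -> 7, so the backend only walks the entries that were actually set. */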
2178
2179/*===========================================================================
2180 * FUNCTION   : lookupFwkName
2181 *
2182 * DESCRIPTION: In case the enum is not same in fwk and backend
2183 *              make sure the parameter is correctly propagated
2184 *
2185 * PARAMETERS  :
2186 *   @arr      : map between the two enums
2187 *   @len      : len of the map
2188 *   @hal_name : name of the hal_parm to map
2189 *
2190 * RETURN     : int type of status
2191 *              fwk_name  -- success
2192 *              non-zero failure code
2193 *==========================================================================*/
2194int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2195                                             int len, int hal_name)
2196{
2197
2198    for (int i = 0; i < len; i++) {
2199        if (arr[i].hal_name == hal_name)
2200            return arr[i].fwk_name;
2201    }
2202
2203    /* Not being able to find a matching framework type is not necessarily
2204     * an error. This happens when mm-camera supports more attributes
2205     * than the framework does */
2206    ALOGD("%s: Cannot find matching framework type", __func__);
2207    return NAME_NOT_FOUND;
2208}
2209
2210/*===========================================================================
2211 * FUNCTION   : lookupHalName
2212 *
2213 * DESCRIPTION: In case the enum is not same in fwk and backend
2214 *              make sure the parameter is correctly propagated
2215 *
2216 * PARAMETERS  :
2217 *   @arr      : map between the two enums
2218 *   @len      : len of the map
2219 *   @fwk_name : name of the fwk_parm to map
2220 *
2221 * RETURN     : int32_t type of status
2222 *              hal_name  -- success
2223 *              none-zero failure code
2224 *==========================================================================*/
2225int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2226                                             int len, int fwk_name)
2227{
2228    for (int i = 0; i < len; i++) {
2229       if (arr[i].fwk_name == fwk_name)
2230           return arr[i].hal_name;
2231    }
2232    ALOGE("%s: Cannot find matching hal type", __func__);
2233    return NAME_NOT_FOUND;
2234}
2235
2236/*===========================================================================
2237 * FUNCTION   : getCamInfo
2238 *
2239 * DESCRIPTION: query camera capabilities
2240 *
2241 * PARAMETERS :
2242 *   @cameraId  : camera Id
2243 *   @info      : camera info struct to be filled in with camera capabilities
2244 *
2245 * RETURN     : int32_t type of status
2246 *              NO_ERROR  -- success
2247 *              non-zero failure code
2248 *==========================================================================*/
2249int QCamera3HardwareInterface::getCamInfo(int cameraId,
2250                                    struct camera_info *info)
2251{
2252    int rc = 0;
2253
2254    if (NULL == gCamCapability[cameraId]) {
2255        rc = initCapabilities(cameraId);
2256        if (rc < 0) {
2257            //pthread_mutex_unlock(&g_camlock);
2258            return rc;
2259        }
2260    }
2261
2262    if (NULL == gStaticMetadata[cameraId]) {
2263        rc = initStaticMetadata(cameraId);
2264        if (rc < 0) {
2265            return rc;
2266        }
2267    }
2268
2269    switch(gCamCapability[cameraId]->position) {
2270    case CAM_POSITION_BACK:
2271        info->facing = CAMERA_FACING_BACK;
2272        break;
2273
2274    case CAM_POSITION_FRONT:
2275        info->facing = CAMERA_FACING_FRONT;
2276        break;
2277
2278    default:
2279        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2280        rc = -1;
2281        break;
2282    }
2283
2284
2285    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2286    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2287    info->static_camera_characteristics = gStaticMetadata[cameraId];
2288
2289    return rc;
2290}
2291
2292/*===========================================================================
2293 * FUNCTION   : translateCapabilityToMetadata
2294 *
2295 * DESCRIPTION: translate capabilities into default request settings for the given template
2296 *
2297 * PARAMETERS : type of the request
2298 *
2299 *
2300 * RETURN     : success: camera_metadata_t*
2301 *              failure: NULL
2302 *
2303 *==========================================================================*/
2304camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2305{
2306    pthread_mutex_lock(&mMutex);
2307
2308    if (mDefaultMetadata[type] != NULL) {
2309        pthread_mutex_unlock(&mMutex);
2310        return mDefaultMetadata[type];
2311    }
2312    //first time we are handling this request
2313    //fill up the metadata structure using the wrapper class
2314    CameraMetadata settings;
2315    //translate from cam_capability_t to camera_metadata_tag_t
2316    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2317    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2318
2319    /*control*/
2320
2321    uint8_t controlIntent = 0;
2322    switch (type) {
2323      case CAMERA3_TEMPLATE_PREVIEW:
2324        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2325        break;
2326      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2327        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2328        break;
2329      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2330        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2331        break;
2332      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2333        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2334        break;
2335      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2336        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2337        break;
2338      default:
2339        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2340        break;
2341    }
2342    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2343
2344    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2345            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2346
2347    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2348    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2349
2350    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2351    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2352
2353    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2354    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2355
2356    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2357    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2358
2359    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2360    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2361
2362    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2363    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2364
2365    static uint8_t focusMode;
2366    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2367        ALOGE("%s: Setting focus mode to auto", __func__);
2368        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2369    } else {
2370        ALOGE("%s: Setting focus mode to off", __func__);
2371        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2372    }
2373    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2374
2375    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2376    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2377
2378    /*flash*/
2379    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2380    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2381
2382
2383    /* lens */
2384    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2385    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2386
2387    if (gCamCapability[mCameraId]->filter_densities_count) {
2388        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2389        settings.update(ANDROID_LENS_FILTER_DENSITY,
2390                        &default_filter_density, 1);
2391    }
2392
2393    float default_focal_length = gCamCapability[mCameraId]->focal_length;
2394    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2395
2396    mDefaultMetadata[type] = settings.release();
2397
2398    pthread_mutex_unlock(&mMutex);
2399    return mDefaultMetadata[type];
2400}
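/* Note: the default settings for each request template are built only once and
 * cached in mDefaultMetadata[type] under mMutex; subsequent calls for the same
 * template return the cached camera_metadata_t without re-translating. */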
2401
2402/*===========================================================================
2403 * FUNCTION   : setFrameParameters
2404 *
2405 * DESCRIPTION: set parameters per frame as requested in the metadata from
2406 *              framework
2407 *
2408 * PARAMETERS :
2409 *   @frame_id  : frame number of the request
2410 *   @settings  : frame settings information from framework
2411 *
2412 * RETURN     : success: NO_ERROR
2413 *              failure:
2414 *==========================================================================*/
2415int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2416                                                  const camera_metadata_t *settings)
2417{
2418    /*translate from camera_metadata_t type to parm_type_t*/
2419    int rc = 0;
2420    if (settings == NULL && mFirstRequest) {
2421        /*settings cannot be null for the first request*/
2422        return BAD_VALUE;
2423    }
2424
2425    int32_t hal_version = CAM_HAL_V3;
2426
2427    memset(mParameters, 0, sizeof(parm_buffer_t));
2428    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2429    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2430                sizeof(hal_version), &hal_version);
2431
2432    /*we need to update the frame number in the parameters*/
2433    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2434                                sizeof(frame_id), &frame_id);
2435    if (rc < 0) {
2436        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2437        return BAD_VALUE;
2438    }
2439
2440    if(settings != NULL){
2441        rc = translateMetadataToParameters(settings);
2442    }
2443    /*set the parameters to backend*/
2444    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2445    return rc;
2446}
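/* Per-request flow: the parameter batch is cleared, the HAL version and frame
 * number are always added, the framework settings (when present) are translated
 * into HAL parameters, and the batch is then pushed to the backend in a single
 * set_parms() call. */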
2447
2448/*===========================================================================
2449 * FUNCTION   : translateMetadataToParameters
2450 *
2451 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2452 *
2453 *
2454 * PARAMETERS :
2455 *   @settings  : frame settings information from framework
2456 *
2457 *
2458 * RETURN     : success: NO_ERROR
2459 *              failure:
2460 *==========================================================================*/
2461int QCamera3HardwareInterface::translateMetadataToParameters
2462                                  (const camera_metadata_t *settings)
2463{
2464    int rc = 0;
2465    CameraMetadata frame_settings;
2466    frame_settings = settings;
2467
2468
2469    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2470        int32_t antibandingMode =
2471            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
2472        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2473                sizeof(antibandingMode), &antibandingMode);
2474    }
2475
2476    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2477        int32_t expCompensation = frame_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2478        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2479          sizeof(expCompensation), &expCompensation);
2480    }
2481
2482    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2483        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2484        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2485                sizeof(aeLock), &aeLock);
2486    }
2487
2488    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2489        cam_fps_range_t fps_range;
2490        fps_range.min_fps =
2491            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2492        fps_range.max_fps =
2493            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2494        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2495                sizeof(fps_range), &fps_range);
2496    }
2497
2498    float focalDistance = -1.0;
2499    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2500        focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2501        rc = AddSetParmEntryToBatch(mParameters,
2502                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2503                sizeof(focalDistance), &focalDistance);
2504    }
2505
2506    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2507        uint8_t fwk_focusMode =
2508            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2509        uint8_t focusMode;
2510        if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2511            focusMode = CAM_FOCUS_MODE_INFINITY;
2512        } else {
2513            focusMode = lookupHalName(FOCUS_MODES_MAP,
2514                    sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2515                    fwk_focusMode);
2516        }
2517        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2518                sizeof(focusMode), &focusMode);
2519    }
2520
2521    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2522        uint8_t awbLock =
2523            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2524        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2525                sizeof(awbLock), &awbLock);
2526    }
2527
2528    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2529        uint8_t fwk_whiteLevel =
2530            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2531        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2532                sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2533                fwk_whiteLevel);
2534        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2535                sizeof(whiteLevel), &whiteLevel);
2536    }
2537
2538    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2539        uint8_t fwk_effectMode =
2540            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2541        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2542                sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
2543                fwk_effectMode);
2544        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2545                sizeof(effectMode), &effectMode);
2546    }
2547
2548    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2549        uint8_t fwk_aeMode =
2550            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2551        uint8_t aeMode;
2552        int32_t redeye;
2553        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2554            aeMode = CAM_AE_MODE_OFF;
2555        } else {
2556            aeMode = CAM_AE_MODE_ON;
2557        }
2558        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2559            redeye = 1;
2560        } else {
2561            redeye = 0;
2562        }
2563        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2564                sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
2565                fwk_aeMode);
2566        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2567                sizeof(aeMode), &aeMode);
2568        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2569                sizeof(flashMode), &flashMode);
2570        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2571                sizeof(redeye), &redeye);
2572    }
2573
2574    if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) {
2575        int32_t metaFrameNumber =
2576            frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0];
2577        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2578                sizeof(metaFrameNumber), &metaFrameNumber);
2579    }
2580
2581    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2582        uint8_t colorCorrectMode =
2583            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2584        rc =
2585            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2586                    sizeof(colorCorrectMode), &colorCorrectMode);
2587    }
2588    cam_trigger_t aecTrigger;
2589    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2590    aecTrigger.trigger_id = -1;
2591    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2592        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2593        aecTrigger.trigger =
2594            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2595        aecTrigger.trigger_id =
2596            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2597    }
2598    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2599                                sizeof(aecTrigger), &aecTrigger);
2600
2601    /*af_trigger must come with a trigger id*/
2602    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2603        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2604        cam_trigger_t af_trigger;
2605        af_trigger.trigger =
2606            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2607        af_trigger.trigger_id =
2608            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2609        rc = AddSetParmEntryToBatch(mParameters,
2610                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2611    }
2612
2613    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2614        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2615        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2616                sizeof(metaMode), &metaMode);
2617        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2618           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2619           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2620                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2621                                             fwk_sceneMode);
2622           rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2623                sizeof(sceneMode), &sceneMode);
2624        }
2625    }
2626
2627    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2628        int32_t demosaic =
2629            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2630        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2631                sizeof(demosaic), &demosaic);
2632    }
2633
2634    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2635        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2636        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE,
2637                sizeof(edgeMode), &edgeMode);
2638    }
2639
2640    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2641        int32_t edgeStrength =
2642            frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
2643        rc = AddSetParmEntryToBatch(mParameters,
2644                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2645    }
2646
2647    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2648        uint8_t flashMode =
2649            frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2650        rc = AddSetParmEntryToBatch(mParameters,
2651                CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode);
2652    }
2653
2654    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2655        uint8_t flashPower =
2656            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2657        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2658                sizeof(flashPower), &flashPower);
2659    }
2660
2661    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2662        int64_t flashFiringTime =
2663            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2664        rc = AddSetParmEntryToBatch(mParameters,
2665                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2666    }
2667
2668    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2669        uint8_t geometricMode =
2670            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2671        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2672                sizeof(geometricMode), &geometricMode);
2673    }
2674
2675    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2676        uint8_t geometricStrength =
2677            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2678        rc = AddSetParmEntryToBatch(mParameters,
2679                CAM_INTF_META_GEOMETRIC_STRENGTH,
2680                sizeof(geometricStrength), &geometricStrength);
2681    }
2682
2683    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2684        uint8_t hotPixelMode =
2685            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2686        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2687                sizeof(hotPixelMode), &hotPixelMode);
2688    }
2689
2690    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2691        float lensAperture =
2692            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2693        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2694                sizeof(lensAperture), &lensAperture);
2695    }
2696
2697    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2698        float filterDensity =
2699            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2700        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2701                sizeof(filterDensity), &filterDensity);
2702    }
2703
2704    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2705        float focalLength =
2706            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2707        rc = AddSetParmEntryToBatch(mParameters,
2708                CAM_INTF_META_LENS_FOCAL_LENGTH,
2709                sizeof(focalLength), &focalLength);
2710    }
2711
2712    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2713        uint8_t optStabMode =
2714            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2715        rc = AddSetParmEntryToBatch(mParameters,
2716                CAM_INTF_META_LENS_OPT_STAB_MODE,
2717                sizeof(optStabMode), &optStabMode);
2718    }
2719
2720    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2721        uint8_t noiseRedMode =
2722            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2723        rc = AddSetParmEntryToBatch(mParameters,
2724                CAM_INTF_META_NOISE_REDUCTION_MODE,
2725                sizeof(noiseRedMode), &noiseRedMode);
2726    }
2727
2728    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2729        uint8_t noiseRedStrength =
2730            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2731        rc = AddSetParmEntryToBatch(mParameters,
2732                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2733                sizeof(noiseRedStrength), &noiseRedStrength);
2734    }
2735
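    // Cache the scaler crop region (when present) so the 3A ROIs below can be
    // checked against it before being sent to the backend.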
2736    cam_crop_region_t scalerCropRegion;
2737    bool scalerCropSet = false;
2738    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2739        scalerCropRegion.left =
2740            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2741        scalerCropRegion.top =
2742            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2743        scalerCropRegion.width =
2744            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2745        scalerCropRegion.height =
2746            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2747        rc = AddSetParmEntryToBatch(mParameters,
2748                CAM_INTF_META_SCALER_CROP_REGION,
2749                sizeof(scalerCropRegion), &scalerCropRegion);
2750        scalerCropSet = true;
2751    }
2752
2753    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2754        int64_t sensorExpTime =
2755            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2756        rc = AddSetParmEntryToBatch(mParameters,
2757                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2758                sizeof(sensorExpTime), &sensorExpTime);
2759    }
2760
2761    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2762        int64_t sensorFrameDuration =
2763            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2764        rc = AddSetParmEntryToBatch(mParameters,
2765                CAM_INTF_META_SENSOR_FRAME_DURATION,
2766                sizeof(sensorFrameDuration), &sensorFrameDuration);
2767    }
2768
2769    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2770        int32_t sensorSensitivity =
2771            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2772        rc = AddSetParmEntryToBatch(mParameters,
2773                CAM_INTF_META_SENSOR_SENSITIVITY,
2774                sizeof(sensorSensitivity), &sensorSensitivity);
2775    }
2776
2777    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
2778        int32_t shadingMode =
2779            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
2780        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
2781                sizeof(shadingMode), &shadingMode);
2782    }
2783
2784    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
2785        uint8_t shadingStrength =
2786            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
2787        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
2788                sizeof(shadingStrength), &shadingStrength);
2789    }
2790
2791    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
2792        uint8_t facedetectMode =
2793            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
2794        rc = AddSetParmEntryToBatch(mParameters,
2795                CAM_INTF_META_STATS_FACEDETECT_MODE,
2796                sizeof(facedetectMode), &facedetectMode);
2797    }
2798
2799    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
2800        uint8_t histogramMode =
2801            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
2802        rc = AddSetParmEntryToBatch(mParameters,
2803                CAM_INTF_META_STATS_HISTOGRAM_MODE,
2804                sizeof(histogramMode), &histogramMode);
2805    }
2806
2807    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
2808        uint8_t sharpnessMapMode =
2809            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
2810        rc = AddSetParmEntryToBatch(mParameters,
2811                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
2812                sizeof(sharpnessMapMode), &sharpnessMapMode);
2813    }
2814
2815    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
2816        uint8_t tonemapMode =
2817            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
2818        rc = AddSetParmEntryToBatch(mParameters,
2819                CAM_INTF_META_TONEMAP_MODE,
2820                sizeof(tonemapMode), &tonemapMode);
2821    }
2822
2823    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2824        uint8_t captureIntent =
2825            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2826        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2827                sizeof(captureIntent), &captureIntent);
2828    }
2829
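    // Translate the framework 3A regions (AE/AF/AWB) into cam_area_t ROIs.
    // When a crop region was supplied, resetIfNeededROI() decides whether the
    // ROI should still be applied relative to that crop.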
2830    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
2831        cam_area_t roi;
2832        bool reset = true;
2833        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
2834        if (scalerCropSet) {
2835            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2836        }
2837        if (reset) {
2838            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
2839                    sizeof(roi), &roi);
2840        }
2841    }
2842
2843    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
2844        cam_area_t roi;
2845        bool reset = true;
2846        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
2847        if (scalerCropSet) {
2848            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2849        }
2850        if (reset) {
2851            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
2852                    sizeof(roi), &roi);
2853        }
2854    }
2855
2856    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
2857        cam_area_t roi;
2858        bool reset = true;
2859        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
2860        if (scalerCropSet) {
2861            reset = resetIfNeededROI(&roi, &scalerCropRegion);
2862        }
2863        if (reset) {
2864            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
2865                    sizeof(roi), &roi);
2866        }
2867    }
2868    return rc;
2869}
2870
2871/*===========================================================================
2872 * FUNCTION   : getJpegSettings
2873 *
2874 * DESCRIPTION: save the jpeg settings in the HAL
2875 *
2876 *
2877 * PARAMETERS :
2878 *   @settings  : frame settings information from framework
2879 *
2880 *
2881 * RETURN     : success: NO_ERROR
2882 *              failure: non-zero error code
2883 *==========================================================================*/
2884int QCamera3HardwareInterface::getJpegSettings
2885                                  (const camera_metadata_t *settings)
2886{
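    // Release any JPEG settings cached from a previous capture before
    // parsing the new frame settings.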
2887    if (mJpegSettings) {
2888        if (mJpegSettings->gps_timestamp) {
2889            free(mJpegSettings->gps_timestamp);
2890            mJpegSettings->gps_timestamp = NULL;
2891        }
2892        if (mJpegSettings->gps_coordinates) {
2893            for (int i = 0; i < 3; i++) {
2894                free(mJpegSettings->gps_coordinates[i]);
2895                mJpegSettings->gps_coordinates[i] = NULL;
2896            }
2897        }
2898        free(mJpegSettings);
2899        mJpegSettings = NULL;
2900    }
2901    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
    if (mJpegSettings == NULL) {
        ALOGE("%s: Failed to allocate jpeg_settings_t", __func__);
        return NO_MEMORY;
    }
2902    CameraMetadata jpeg_settings;
2903    jpeg_settings = settings;
2904
2905    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
2906        mJpegSettings->jpeg_orientation =
2907            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
2908    } else {
2909        mJpegSettings->jpeg_orientation = 0;
2910    }
2911    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
2912        mJpegSettings->jpeg_quality =
2913            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
2914    } else {
2915        mJpegSettings->jpeg_quality = 85;
2916    }
2917    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
2918        mJpegSettings->thumbnail_size.width =
2919            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
2920        mJpegSettings->thumbnail_size.height =
2921            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
2922    } else {
2923        mJpegSettings->thumbnail_size.width = 0;
2924        mJpegSettings->thumbnail_size.height = 0;
2925    }
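    // GPS coordinates arrive as [latitude, longitude, altitude]; each value is
    // stored through a heap-allocated pointer so a missing tag can be
    // represented by a NULL entry.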
2926    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
2927        for (int i = 0; i < 3; i++) {
2928            mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double));
2929            *(mJpegSettings->gps_coordinates[i]) =
2930                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
2931        }
2932    } else {
2933        for (int i = 0; i < 3; i++) {
2934            mJpegSettings->gps_coordinates[i] = NULL;
2935        }
2936    }
2937
2938    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
2939        mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t));
2940        *(mJpegSettings->gps_timestamp) =
2941            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
2942    } else {
2943        mJpegSettings->gps_timestamp = NULL;
2944    }
2945
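    // Copy the GPS processing method string and ensure it is NUL-terminated.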
2946    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
2947        int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
2948        for (int i = 0; i < len; i++) {
2949            mJpegSettings->gps_processing_method[i] =
2950                jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
2951        }
2952        if (mJpegSettings->gps_processing_method[len-1] != '\0') {
2953            mJpegSettings->gps_processing_method[len] = '\0';
2954        }
2955    } else {
2956        mJpegSettings->gps_processing_method[0] = '\0';
2957    }
2958
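    // Prefer the requested sensitivity; otherwise fall back to the ISO speed
    // reported in the latest metadata response.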
2959    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2960        mJpegSettings->sensor_sensitivity =
2961            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2962    } else {
2963        mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
2964    }
2965
2966    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2967        mJpegSettings->lens_focal_length =
2968            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2969    }
2970    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2971        mJpegSettings->exposure_compensation =
2972            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2973    }
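    // These fields are derived from the static camera capabilities rather than
    // the per-request settings.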
2974    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
2975    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
2976    return 0;
2977}
2978
2979/*===========================================================================
2980 * FUNCTION   : captureResultCb
2981 *
2982 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
2983 *
2984 * PARAMETERS :
2985 *   @frame  : frame information from mm-camera-interface
2986 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
2987 *   @userdata: userdata
2988 *
2989 * RETURN     : NONE
2990 *==========================================================================*/
2991void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
2992                camera3_stream_buffer_t *buffer,
2993                uint32_t frame_number, void *userdata)
2994{
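    // Recover the HAL instance from the opaque userdata pointer and forward
    // the result to the member implementation.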
2995    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
2996    if (hw == NULL) {
2997        ALOGE("%s: Invalid hw %p", __func__, hw);
2998        return;
2999    }
3000
3001    hw->captureResultCb(metadata, buffer, frame_number);
3002    return;
3003}
3004
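// The static methods below are the camera3_device_ops entry points exposed to
// the framework. Each one recovers the QCamera3HardwareInterface instance from
// device->priv and forwards the call to the corresponding member function.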
3005/*===========================================================================
3006 * FUNCTION   : initialize
3007 *
3008 * DESCRIPTION: Pass framework callback pointers to HAL
3009 *
3010 * PARAMETERS :
3011 *   @device       : camera3 device struct
3012 *   @callback_ops : framework callback function pointers
3013 * RETURN     : Success : 0
3014 *              Failure: -ENODEV
3015 *==========================================================================*/
3016
3017int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3018                                  const camera3_callback_ops_t *callback_ops)
3019{
3020    ALOGV("%s: E", __func__);
3021    QCamera3HardwareInterface *hw =
3022        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3023    if (!hw) {
3024        ALOGE("%s: NULL camera device", __func__);
3025        return -ENODEV;
3026    }
3027
3028    int rc = hw->initialize(callback_ops);
3029    ALOGV("%s: X", __func__);
3030    return rc;
3031}
3032
3033/*===========================================================================
3034 * FUNCTION   : configure_streams
3035 *
3036 * DESCRIPTION:
3037 *
3038 * PARAMETERS :
3039 *
3040 *   @device      : camera3 device struct
3041 *   @stream_list : stream configuration requested by the framework
3042 *              Failure: -EINVAL (if stream configuration is invalid)
3043 *                       -ENODEV (fatal error)
3044 *==========================================================================*/
3045
3046int QCamera3HardwareInterface::configure_streams(
3047        const struct camera3_device *device,
3048        camera3_stream_configuration_t *stream_list)
3049{
3050    ALOGV("%s: E", __func__);
3051    QCamera3HardwareInterface *hw =
3052        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3053    if (!hw) {
3054        ALOGE("%s: NULL camera device", __func__);
3055        return -ENODEV;
3056    }
3057    int rc = hw->configureStreams(stream_list);
3058    ALOGV("%s: X", __func__);
3059    return rc;
3060}
3061
3062/*===========================================================================
3063 * FUNCTION   : register_stream_buffers
3064 *
3065 * DESCRIPTION: Register stream buffers with the device
3066 *
3067 * PARAMETERS :
3068 *   @device     : camera3 device struct
 *   @buffer_set : buffers to be registered for the configured streams
3069 * RETURN     : 0 on success, negative error code on failure
3070 *==========================================================================*/
3071int QCamera3HardwareInterface::register_stream_buffers(
3072        const struct camera3_device *device,
3073        const camera3_stream_buffer_set_t *buffer_set)
3074{
3075    ALOGV("%s: E", __func__);
3076    QCamera3HardwareInterface *hw =
3077        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3078    if (!hw) {
3079        ALOGE("%s: NULL camera device", __func__);
3080        return -ENODEV;
3081    }
3082    int rc = hw->registerStreamBuffers(buffer_set);
3083    ALOGV("%s: X", __func__);
3084    return rc;
3085}
3086
3087/*===========================================================================
3088 * FUNCTION   : construct_default_request_settings
3089 *
3090 * DESCRIPTION: Configure a settings buffer to meet the required use case
3091 *
3092 * PARAMETERS :
3093 *
3094 *   @device : camera3 device struct
3095 *   @type   : request template type (preview, still capture, video, etc.)
3096 *              Failure: Return NULL
3097 *==========================================================================*/
3098const camera_metadata_t* QCamera3HardwareInterface::
3099    construct_default_request_settings(const struct camera3_device *device,
3100                                        int type)
3101{
3102
3103    ALOGV("%s: E", __func__);
3104    camera_metadata_t* fwk_metadata = NULL;
3105    QCamera3HardwareInterface *hw =
3106        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3107    if (!hw) {
3108        ALOGE("%s: NULL camera device", __func__);
3109        return NULL;
3110    }
3111
3112    fwk_metadata = hw->translateCapabilityToMetadata(type);
3113
3114    ALOGV("%s: X", __func__);
3115    return fwk_metadata;
3116}
3117
3118/*===========================================================================
3119 * FUNCTION   : process_capture_request
3120 *
3121 * DESCRIPTION: Queue a capture request to be processed by the HAL
3122 *
3123 * PARAMETERS :
3124 *   @device  : camera3 device struct
3125 *   @request : capture request to be queued for processing
3126 * RETURN     : 0 on success, negative error code on failure
3127 *==========================================================================*/
3128int QCamera3HardwareInterface::process_capture_request(
3129                    const struct camera3_device *device,
3130                    camera3_capture_request_t *request)
3131{
3132    ALOGV("%s: E", __func__);
3133    QCamera3HardwareInterface *hw =
3134        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3135    if (!hw) {
3136        ALOGE("%s: NULL camera device", __func__);
3137        return -EINVAL;
3138    }
3139
3140    int rc = hw->processCaptureRequest(request);
3141    ALOGV("%s: X", __func__);
3142    return rc;
3143}
3144
3145/*===========================================================================
3146 * FUNCTION   : get_metadata_vendor_tag_ops
3147 *
3148 * DESCRIPTION: Retrieve the vendor tag query operations supported by the HAL
3149 *
3150 * PARAMETERS :
3151 *   @device : camera3 device struct
3152 *   @ops    : vendor tag query ops structure to be filled in
3153 * RETURN     : None
3154 *==========================================================================*/
3155
3156void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3157                const struct camera3_device *device,
3158                vendor_tag_query_ops_t* ops)
3159{
3160    ALOGV("%s: E", __func__);
3161    QCamera3HardwareInterface *hw =
3162        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3163    if (!hw) {
3164        ALOGE("%s: NULL camera device", __func__);
3165        return;
3166    }
3167
3168    hw->getMetadataVendorTagOps(ops);
3169    ALOGV("%s: X", __func__);
3170    return;
3171}
3172
3173/*===========================================================================
3174 * FUNCTION   : dump
3175 *
3176 * DESCRIPTION: Dump HAL state for debugging into the supplied file descriptor
3177 *
3178 * PARAMETERS :
3179 *   @device : camera3 device struct
3180 *   @fd     : file descriptor to dump the HAL state into
3181 * RETURN     : None
3182 *==========================================================================*/
3183
3184void QCamera3HardwareInterface::dump(
3185                const struct camera3_device *device, int fd)
3186{
3187    ALOGV("%s: E", __func__);
3188    QCamera3HardwareInterface *hw =
3189        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3190    if (!hw) {
3191        ALOGE("%s: NULL camera device", __func__);
3192        return;
3193    }
3194
3195    hw->dump(fd);
3196    ALOGV("%s: X", __func__);
3197    return;
3198}
3199
3200/*===========================================================================
3201 * FUNCTION   : close_camera_device
3202 *
3203 * DESCRIPTION: Close the camera device and mark the camera session inactive
3204 *
3205 * PARAMETERS :
3206 *   @device : hw_device_t handle of the camera device to close
3207 *
3208 * RETURN     : NO_ERROR on success, BAD_VALUE if the device is NULL
3209 *==========================================================================*/
3210int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3211{
3212    ALOGV("%s: E", __func__);
3213    int ret = NO_ERROR;
3214    QCamera3HardwareInterface *hw =
3215        reinterpret_cast<QCamera3HardwareInterface *>(
3216            reinterpret_cast<camera3_device_t *>(device)->priv);
3217    if (!hw) {
3218        ALOGE("%s: NULL camera device", __func__);
3219        return BAD_VALUE;
3220    }
3221    delete hw;
3222
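    // Mark the camera session as inactive so a subsequent open() can proceed.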
3223    pthread_mutex_lock(&mCameraSessionLock);
3224    mCameraSessionActive = 0;
3225    pthread_mutex_unlock(&mCameraSessionLock);
3226    ALOGV("%s: X", __func__);
3227    return ret;
3228}
3229
3230}; //end namespace qcamera
3231