QCamera3HWI.cpp revision 53074dcacf200e7252b5d064733b09d027d378e4
1/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31
32#include <cutils/properties.h>
33#include <hardware/camera3.h>
34#include <camera/CameraMetadata.h>
35#include <stdlib.h>
36#include <utils/Log.h>
37#include <utils/Errors.h>
38#include <ui/Fence.h>
39#include <gralloc_priv.h>
40#include "QCamera3HWI.h"
41#include "QCamera3Mem.h"
42#include "QCamera3Channel.h"
43#include "QCamera3PostProc.h"
44
45using namespace android;
46
47namespace qcamera {
48#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
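// File-scope caches indexed by camera id: gCamCapability is filled in by
// initCapabilities() below; gStaticMetadata holds the framework-facing static
// metadata (populated outside this excerpt).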
49cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
50parm_buffer_t *prevSettings;
51const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
52
53const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
54    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
55    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
56    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
57    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
58    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
59    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
60    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
61    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
62    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
63};
64
65const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
66    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
67    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
68    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
69    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
70    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
71    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
72    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
73    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
74    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
75};
76
77const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
78    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
79    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
80    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
81    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
82    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
83    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
84    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
85    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
86    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
87    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
88    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
89    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
90    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
91    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
92    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
93};
94
95const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
96    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
97    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
98    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
99    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
100    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
101    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
102};
103
104const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
105    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
106    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
107    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
108    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
109};
110
111const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
112    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
113    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
114    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
115    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
116    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
117};
118
119const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
120    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
121    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON   },
122    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
123};
124
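// Flat list of supported JPEG thumbnail (width, height) pairs; the trailing
// (0, 0) entry advertises "no thumbnail" as a valid choice.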
125const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
126                                             320, 240, 176, 144, 0, 0};
127
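// Static entry points handed to the framework through camera3_device_t::ops.
// Each wrapper recovers the QCamera3HardwareInterface instance from
// device->priv (set in the constructor) before forwarding the call; the
// wrapper definitions are not shown in this excerpt.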
128camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
129    initialize:                         QCamera3HardwareInterface::initialize,
130    configure_streams:                  QCamera3HardwareInterface::configure_streams,
131    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
132    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
133    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
134    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
135    dump:                               QCamera3HardwareInterface::dump,
136};
137
138
139/*===========================================================================
140 * FUNCTION   : QCamera3HardwareInterface
141 *
142 * DESCRIPTION: constructor of QCamera3HardwareInterface
143 *
144 * PARAMETERS :
145 *   @cameraId  : camera ID
146 *
147 * RETURN     : none
148 *==========================================================================*/
149QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
150    : mCameraId(cameraId),
151      mCameraHandle(NULL),
152      mCameraOpened(false),
153      mCallbackOps(NULL),
154      mInputStream(NULL),
155      mMetadataChannel(NULL),
156      mFirstRequest(false),
157      mParamHeap(NULL),
158      mParameters(NULL),
159      mJpegSettings(NULL)
160{
161    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
162    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
163    mCameraDevice.common.close = close_camera_device;
164    mCameraDevice.ops = &mCameraOps;
165    mCameraDevice.priv = this;
166    gCamCapability[cameraId]->version = CAM_HAL_V3;
167
168    pthread_mutex_init(&mRequestLock, NULL);
169    pthread_cond_init(&mRequestCond, NULL);
170    mPendingRequest = 0;
171    mCurrentRequestId = -1;
172
173    pthread_mutex_init(&mMutex, NULL);
174    pthread_mutex_init(&mCaptureResultLock, NULL);
175
176    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
177        mDefaultMetadata[i] = NULL;
178}
179
180/*===========================================================================
181 * FUNCTION   : ~QCamera3HardwareInterface
182 *
183 * DESCRIPTION: destructor of QCamera3HardwareInterface
184 *
185 * PARAMETERS : none
186 *
187 * RETURN     : none
188 *==========================================================================*/
189QCamera3HardwareInterface::~QCamera3HardwareInterface()
190{
191    ALOGV("%s: E", __func__);
192    /* Clean up all channels */
193    if (mMetadataChannel) mMetadataChannel->stop();
194    delete mMetadataChannel;
195    mMetadataChannel = NULL;
196    /* We need to stop all streams before deleting any stream */
197    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
198        it != mStreamInfo.end(); it++) {
199        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
200        channel->stop();
201    }
202    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
203        it != mStreamInfo.end(); it++) {
204        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
205        delete channel;
206        free (*it);
207    }
208
209    if (mJpegSettings != NULL) {
210        free(mJpegSettings);
211        mJpegSettings = NULL;
212    }
213    deinitParameters();
214    closeCamera();
215
216    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
217        if (mDefaultMetadata[i])
218            free_camera_metadata(mDefaultMetadata[i]);
219
220    pthread_mutex_destroy(&mRequestLock);
221    pthread_cond_destroy(&mRequestCond);
222
223    pthread_mutex_destroy(&mMutex);
224    pthread_mutex_destroy(&mCaptureResultLock);
225    ALOGV("%s: X", __func__);
226}
227
228/*===========================================================================
229 * FUNCTION   : openCamera
230 *
231 * DESCRIPTION: open camera
232 *
233 * PARAMETERS :
234 *   @hw_device  : double ptr for camera device struct
235 *
236 * RETURN     : int32_t type of status
237 *              NO_ERROR  -- success
238 *              none-zero failure code
239 *==========================================================================*/
240int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
241{
242    //int rc = NO_ERROR;
243    int rc = 0;
244    if (mCameraOpened) {
245        *hw_device = NULL;
246        return PERMISSION_DENIED;
247    }
248
249    rc = openCamera();
250    if (rc == 0)
251        *hw_device = &mCameraDevice.common;
252    else
253        *hw_device = NULL;
254    return rc;
255}
256
257/*===========================================================================
258 * FUNCTION   : openCamera
259 *
260 * DESCRIPTION: open camera
261 *
262 * PARAMETERS : none
263 *
264 * RETURN     : int32_t type of status
265 *              NO_ERROR  -- success
266 *              none-zero failure code
267 *==========================================================================*/
268int QCamera3HardwareInterface::openCamera()
269{
270    if (mCameraHandle) {
271        ALOGE("Failure: Camera already opened");
272        return ALREADY_EXISTS;
273    }
274    mCameraHandle = camera_open(mCameraId);
275    if (!mCameraHandle) {
276        ALOGE("camera_open failed.");
277        return UNKNOWN_ERROR;
278    }
279
280    mCameraOpened = true;
281
282    return NO_ERROR;
283}
284
285/*===========================================================================
286 * FUNCTION   : closeCamera
287 *
288 * DESCRIPTION: close camera
289 *
290 * PARAMETERS : none
291 *
292 * RETURN     : int32_t type of status
293 *              NO_ERROR  -- success
294 *              none-zero failure code
295 *==========================================================================*/
296int QCamera3HardwareInterface::closeCamera()
297{
298    int rc = NO_ERROR;
299
300    if (mCameraHandle != NULL)
        rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
301    mCameraHandle = NULL;
302    mCameraOpened = false;
303
304    return rc;
305}
306
307/*===========================================================================
308 * FUNCTION   : initialize
309 *
310 * DESCRIPTION: Initialize frameworks callback functions
311 *
312 * PARAMETERS :
313 *   @callback_ops : callback function to frameworks
314 *
315 * RETURN     :
316 *
317 *==========================================================================*/
318int QCamera3HardwareInterface::initialize(
319        const struct camera3_callback_ops *callback_ops)
320{
321    int rc;
322
323    pthread_mutex_lock(&mMutex);
324
325    rc = initParameters();
326    if (rc < 0) {
327        ALOGE("%s: initParameters failed %d", __func__, rc);
328        goto err1;
329    }
330    //Create metadata channel and initialize it
331    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
332                    mCameraHandle->ops, captureResultCb,
333                    &gCamCapability[mCameraId]->padding_info, this);
334    if (mMetadataChannel == NULL) {
335        ALOGE("%s: failed to allocate metadata channel", __func__);
336        rc = -ENOMEM;
337        goto err2;
338    }
339    rc = mMetadataChannel->initialize();
340    if (rc < 0) {
341        ALOGE("%s: metadata channel initialization failed", __func__);
342        goto err3;
343    }
344
345    mCallbackOps = callback_ops;
346
347    pthread_mutex_unlock(&mMutex);
348    return 0;
349
350err3:
351    delete mMetadataChannel;
352    mMetadataChannel = NULL;
353err2:
354    deinitParameters();
355err1:
356    pthread_mutex_unlock(&mMutex);
357    return rc;
358}
359
360/*===========================================================================
361 * FUNCTION   : configureStreams
362 *
363 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
364 *              and output streams.
365 *
366 * PARAMETERS :
367 *   @stream_list : streams to be configured
368 *
369 * RETURN     :
370 *
371 *==========================================================================*/
372int QCamera3HardwareInterface::configureStreams(
373        camera3_stream_configuration_t *streamList)
374{
375    int rc = 0;
376    pthread_mutex_lock(&mMutex);
377
378    // Sanity check stream_list
379    if (streamList == NULL) {
380        ALOGE("%s: NULL stream configuration", __func__);
381        pthread_mutex_unlock(&mMutex);
382        return BAD_VALUE;
383    }
384
385    if (streamList->streams == NULL) {
386        ALOGE("%s: NULL stream list", __func__);
387        pthread_mutex_unlock(&mMutex);
388        return BAD_VALUE;
389    }
390
391    if (streamList->num_streams < 1) {
392        ALOGE("%s: Bad number of streams requested: %d", __func__,
393                streamList->num_streams);
394        pthread_mutex_unlock(&mMutex);
395        return BAD_VALUE;
396    }
397
398    camera3_stream_t *inputStream = NULL;
399    /* first invalidate all the streams in mStreamInfo;
400     * if they appear again, they will be validated */
401    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
402            it != mStreamInfo.end(); it++) {
403        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
404        channel->stop();
405        (*it)->status = INVALID;
406    }
407
408    for (size_t i = 0; i < streamList->num_streams; i++) {
409        camera3_stream_t *newStream = streamList->streams[i];
410        ALOGV("%s: newStream type = %d, stream format = %d",
411                __func__, newStream->stream_type, newStream->format);
412        //if the stream is in the mStreamList validate it
413        bool stream_exists = false;
414        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
415                it != mStreamInfo.end(); it++) {
416            if ((*it)->stream == newStream) {
417                QCamera3Channel *channel =
418                    (QCamera3Channel*)(*it)->stream->priv;
419                stream_exists = true;
420                (*it)->status = RECONFIGURE;
421                /*delete the channel object associated with the stream because
422                  we need to reconfigure*/
423                delete channel;
424                (*it)->stream->priv = NULL;
425            }
426        }
427        if (!stream_exists) {
428            //new stream
429            stream_info_t* stream_info;
430            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
431            stream_info->stream = newStream;
432            stream_info->status = VALID;
433            stream_info->registered = 0;
434            mStreamInfo.push_back(stream_info);
435        }
436        if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
437            if (inputStream != NULL) {
438                ALOGE("%s: Multiple input streams requested!", __func__);
439                pthread_mutex_unlock(&mMutex);
440                return BAD_VALUE;
441            }
442            inputStream = newStream;
443        }
444    }
445    mInputStream = inputStream;
446
447    /*clean up invalid streams*/
448    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
449            it != mStreamInfo.end();) {
450        if(((*it)->status) == INVALID){
451            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
452            delete channel;
453            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
454            free(*it);
455            it = mStreamInfo.erase(it);
456        } else {
457            it++;
458        }
459    }
460
461    //mMetadataChannel->stop();
462
463    /* Allocate channel objects for the requested streams */
464    for (size_t i = 0; i < streamList->num_streams; i++) {
465        camera3_stream_t *newStream = streamList->streams[i];
466        if (newStream->priv == NULL) {
467            //New stream, construct channel
468            switch (newStream->stream_type) {
469            case CAMERA3_STREAM_INPUT:
470                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
471                break;
472            case CAMERA3_STREAM_BIDIRECTIONAL:
473                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
474                    GRALLOC_USAGE_HW_CAMERA_WRITE;
475                break;
476            case CAMERA3_STREAM_OUTPUT:
477                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
478                break;
479            default:
480                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
481                break;
482            }
483
484            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
485                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
486                QCamera3Channel *channel;
487                switch (newStream->format) {
488                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
489                case HAL_PIXEL_FORMAT_YCbCr_420_888:
490                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
491                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
492                            mCameraHandle->ops, captureResultCb,
493                            &gCamCapability[mCameraId]->padding_info, this, newStream);
494                    if (channel == NULL) {
495                        ALOGE("%s: allocation of channel failed", __func__);
496                        pthread_mutex_unlock(&mMutex);
497                        return -ENOMEM;
498                    }
499
500                    newStream->priv = channel;
501                    break;
502                case HAL_PIXEL_FORMAT_BLOB:
503                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
504                    channel = new QCamera3PicChannel(mCameraHandle->camera_handle,
505                            mCameraHandle->ops, captureResultCb,
506                            &gCamCapability[mCameraId]->padding_info, this, newStream);
507                    if (channel == NULL) {
508                        ALOGE("%s: allocation of channel failed", __func__);
509                        pthread_mutex_unlock(&mMutex);
510                        return -ENOMEM;
511                    }
512                    newStream->priv = channel;
513                    break;
514
515                //TODO: Add support for app consumed format?
516                default:
517                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
518                    break;
519                }
520            }
521        } else {
522            // Channel already exists for this stream
523            // Do nothing for now
524        }
525    }
526    /*For the streams to be reconfigured we need to register the buffers
527      since the framework won't*/
528    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
529            it != mStreamInfo.end(); it++) {
530        if ((*it)->status == RECONFIGURE) {
531            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
532            /*only register buffers for streams that have already been
533              registered*/
534            if ((*it)->registered) {
535                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
536                        (*it)->buffer_set.buffers);
537                if (rc != NO_ERROR) {
538                    ALOGE("%s: Failed to register the buffers of old stream,"
539                            " rc = %d", __func__, rc);
540                }
541                ALOGD("%s: channel %p has %d buffers",
542                        __func__, channel, (*it)->buffer_set.num_buffers);
543            }
544        }
545
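        // (Re)initialize the outstanding-buffer count for this stream: the map
        // tracks how many of the stream's buffers are currently held by the HAL.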
546        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
547        if (index == NAME_NOT_FOUND) {
548            mPendingBuffersMap.add((*it)->stream, 0);
549        } else {
550            mPendingBuffersMap.editValueAt(index) = 0;
551        }
552    }
553
554    /* Initialize mPendingRequestsList and mPendingBuffersMap */
555    mPendingRequestsList.clear();
556
557    //settings/parameters don't carry over for new configureStreams
558    memset(mParameters, 0, sizeof(parm_buffer_t));
559    mFirstRequest = true;
560
561    pthread_mutex_unlock(&mMutex);
562    return rc;
563}
564
565/*===========================================================================
566 * FUNCTION   : validateCaptureRequest
567 *
568 * DESCRIPTION: validate a capture request from camera service
569 *
570 * PARAMETERS :
571 *   @request : request from framework to process
572 *
573 * RETURN     :
574 *
575 *==========================================================================*/
576int QCamera3HardwareInterface::validateCaptureRequest(
577                    camera3_capture_request_t *request)
578{
579    ssize_t idx = 0;
580    const camera3_stream_buffer_t *b;
581    CameraMetadata meta;
582
583    /* Sanity check the request */
584    if (request == NULL) {
585        ALOGE("%s: NULL capture request", __func__);
586        return BAD_VALUE;
587    }
588
589    uint32_t frameNumber = request->frame_number;
590    if (request->input_buffer != NULL &&
591            request->input_buffer->stream != mInputStream) {
592        ALOGE("%s: Request %d: Input buffer not from input stream!",
593                __FUNCTION__, frameNumber);
594        return BAD_VALUE;
595    }
596    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
597        ALOGE("%s: Request %d: No output buffers provided!",
598                __FUNCTION__, frameNumber);
599        return BAD_VALUE;
600    }
601    if (request->input_buffer != NULL) {
602        //TODO
603        ALOGE("%s: Not supporting input buffer yet", __func__);
604        return BAD_VALUE;
605    }
606
607    // Validate all buffers
608    b = request->output_buffers;
609    do {
610        QCamera3Channel *channel =
611                static_cast<QCamera3Channel*>(b->stream->priv);
612        if (channel == NULL) {
613            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
614                    __func__, frameNumber, idx);
615            return BAD_VALUE;
616        }
617        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
618            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
619                    __func__, frameNumber, idx);
620            return BAD_VALUE;
621        }
622        if (b->release_fence != -1) {
623            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
624                    __func__, frameNumber, idx);
625            return BAD_VALUE;
626        }
627        if (b->buffer == NULL) {
628            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
629                    __func__, frameNumber, idx);
630            return BAD_VALUE;
631        }
632        idx++;
633        b = request->output_buffers + idx;
634    } while (idx < (ssize_t)request->num_output_buffers);
635
636    return NO_ERROR;
637}
638
639/*===========================================================================
640 * FUNCTION   : registerStreamBuffers
641 *
642 * DESCRIPTION: Register buffers for a given stream with the HAL device.
643 *
644 * PARAMETERS :
645 *   @stream_list : streams to be configured
646 *
647 * RETURN     :
648 *
649 *==========================================================================*/
650int QCamera3HardwareInterface::registerStreamBuffers(
651        const camera3_stream_buffer_set_t *buffer_set)
652{
653    int rc = 0;
654
655    pthread_mutex_lock(&mMutex);
656
657    if (buffer_set == NULL) {
658        ALOGE("%s: Invalid buffer_set parameter.", __func__);
659        pthread_mutex_unlock(&mMutex);
660        return -EINVAL;
661    }
662    if (buffer_set->stream == NULL) {
663        ALOGE("%s: Invalid stream parameter.", __func__);
664        pthread_mutex_unlock(&mMutex);
665        return -EINVAL;
666    }
667    if (buffer_set->num_buffers < 1) {
668        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
669        pthread_mutex_unlock(&mMutex);
670        return -EINVAL;
671    }
672    if (buffer_set->buffers == NULL) {
673        ALOGE("%s: Invalid buffers parameter.", __func__);
674        pthread_mutex_unlock(&mMutex);
675        return -EINVAL;
676    }
677
678    camera3_stream_t *stream = buffer_set->stream;
679    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
680
681    //set the buffer_set in the mStreamInfo array
682    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
683            it != mStreamInfo.end(); it++) {
684        if ((*it)->stream == stream) {
685            uint32_t numBuffers = buffer_set->num_buffers;
686            (*it)->buffer_set.stream = buffer_set->stream;
687            (*it)->buffer_set.num_buffers = numBuffers;
688            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
689            if ((*it)->buffer_set.buffers == NULL) {
690                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
691                pthread_mutex_unlock(&mMutex);
692                return -ENOMEM;
693            }
694            for (size_t j = 0; j < numBuffers; j++){
695                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
696            }
697            (*it)->registered = 1;
698        }
699    }
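    // The copy of buffer_set made above lets configureStreams() re-register these
    // buffers after a stream reconfigure, since the framework registers them only once.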
700
701    if (stream->stream_type != CAMERA3_STREAM_OUTPUT) {
702        ALOGE("%s: not yet support non output type stream", __func__);
703        pthread_mutex_unlock(&mMutex);
704        return -EINVAL;
705    }
706    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
707    if (rc < 0) {
708        ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
709        pthread_mutex_unlock(&mMutex);
710        return -ENODEV;
711    }
712
713    pthread_mutex_unlock(&mMutex);
714    return NO_ERROR;
715}
716
717/*===========================================================================
718 * FUNCTION   : processCaptureRequest
719 *
720 * DESCRIPTION: process a capture request from camera service
721 *
722 * PARAMETERS :
723 *   @request : request from framework to process
724 *
725 * RETURN     :
726 *
727 *==========================================================================*/
728int QCamera3HardwareInterface::processCaptureRequest(
729                    camera3_capture_request_t *request)
730{
731    int rc = NO_ERROR;
732    int32_t request_id;
733    CameraMetadata meta;
734
735    pthread_mutex_lock(&mMutex);
736
737    rc = validateCaptureRequest(request);
738    if (rc != NO_ERROR) {
739        ALOGE("%s: incoming request is not valid", __func__);
740        pthread_mutex_unlock(&mMutex);
741        return rc;
742    }
743
744    uint32_t frameNumber = request->frame_number;
745
746    rc = setFrameParameters(request->frame_number, request->settings);
747    if (rc < 0) {
748        ALOGE("%s: fail to set frame parameters", __func__);
749        pthread_mutex_unlock(&mMutex);
750        return rc;
751    }
752
753    meta = request->settings;
754    if (meta.exists(ANDROID_REQUEST_ID)) {
755        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
756        mCurrentRequestId = request_id;
757        ALOGD("%s: Received request with id: %d",__func__, request_id);
758    } else if (mFirstRequest || mCurrentRequestId == -1) {
759        ALOGE("%s: Unable to find request id field,"
760                " & no previous id available", __func__);
        pthread_mutex_unlock(&mMutex);
761        return NAME_NOT_FOUND;
762    } else {
763        ALOGD("%s: Re-using old request id", __func__);
764        request_id = mCurrentRequestId;
765    }
766
767
768    ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__,
769                                    request->num_output_buffers);
770    // Acquire all request buffers first
771    for (size_t i = 0; i < request->num_output_buffers; i++) {
772        const camera3_stream_buffer_t& output = request->output_buffers[i];
773        sp<Fence> acquireFence = new Fence(output.acquire_fence);
774
775        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
776            //Call function to store local copy of jpeg data for encode params.
777            rc = getJpegSettings(request->settings);
778            if (rc < 0) {
779                ALOGE("%s: failed to get jpeg parameters", __func__);
780                pthread_mutex_unlock(&mMutex);
781                return rc;
782            }
783        }
784
785        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
786        if (rc != OK) {
787            ALOGE("%s: fence wait failed %d", __func__, rc);
788            pthread_mutex_unlock(&mMutex);
789            return rc;
790        }
791    }
792
793    /* Update pending request list and pending buffers map */
794    pthread_mutex_lock(&mRequestLock);
795    PendingRequestInfo pendingRequest;
796    pendingRequest.frame_number = frameNumber;
797    pendingRequest.num_buffers = request->num_output_buffers;
798    pendingRequest.request_id = request_id;
799
800    for (size_t i = 0; i < request->num_output_buffers; i++) {
801        RequestedBufferInfo requestedBuf;
802        requestedBuf.stream = request->output_buffers[i].stream;
803        requestedBuf.buffer = NULL;
804        pendingRequest.buffers.push_back(requestedBuf);
805
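        // One more buffer of this stream is now outstanding in the HAL.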
806        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
807    }
808    mPendingRequestsList.push_back(pendingRequest);
809    pthread_mutex_unlock(&mRequestLock);
810
811    // Notify metadata channel we receive a request
812    mMetadataChannel->request(NULL, frameNumber);
813
814    // Call request on other streams
815    for (size_t i = 0; i < request->num_output_buffers; i++) {
816        const camera3_stream_buffer_t& output = request->output_buffers[i];
817        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
818
819        if (channel == NULL) {
820            ALOGE("%s: invalid channel pointer for stream", __func__);
821            continue;
822        }
823
824        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
825            rc = channel->request(output.buffer, frameNumber, mJpegSettings);
826        } else {
827            ALOGI("%s: %d, request with buffer %p, frame_number %d", __func__, __LINE__, output.buffer, frameNumber);
828            rc = channel->request(output.buffer, frameNumber);
829        }
830        if (rc < 0)
831            ALOGE("%s: request failed", __func__);
832    }
833
834    mFirstRequest = false;
835
836    //Block on conditional variable
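    // captureResultCb() clears mPendingRequest and signals mRequestCond once no
    // stream has all of its max_buffers outstanding, which paces incoming requests.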
837    pthread_mutex_lock(&mRequestLock);
838    mPendingRequest = 1;
839    while (mPendingRequest == 1) {
840        pthread_cond_wait(&mRequestCond, &mRequestLock);
841    }
842    pthread_mutex_unlock(&mRequestLock);
843
844    pthread_mutex_unlock(&mMutex);
845    return rc;
846}
847
848/*===========================================================================
849 * FUNCTION   : getMetadataVendorTagOps
850 *
851 * DESCRIPTION: Query vendor tag metadata operations (none supported yet)
852 *
853 * PARAMETERS :
854 *
855 *
856 * RETURN     :
857 *==========================================================================*/
858void QCamera3HardwareInterface::getMetadataVendorTagOps(
859                    vendor_tag_query_ops_t* /*ops*/)
860{
861    /* Enable locks when we eventually add Vendor Tags */
862    /*
863    pthread_mutex_lock(&mMutex);
864
865    pthread_mutex_unlock(&mMutex);
866    */
867    return;
868}
869
870/*===========================================================================
871 * FUNCTION   : dump
872 *
873 * DESCRIPTION: Dump HAL state to the given file descriptor (not implemented yet)
874 *
875 * PARAMETERS :
876 *
877 *
878 * RETURN     :
879 *==========================================================================*/
880void QCamera3HardwareInterface::dump(int /*fd*/)
881{
882    /*Enable lock when we implement this function*/
883    /*
884    pthread_mutex_lock(&mMutex);
885
886    pthread_mutex_unlock(&mMutex);
887    */
888    return;
889}
890
891/*===========================================================================
892 * FUNCTION   : captureResultCb
893 *
894 * DESCRIPTION: Callback handler for all capture result
895 *              (streams, as well as metadata)
896 *
897 * PARAMETERS :
898 *   @metadata : metadata information
899 *   @buffer   : actual gralloc buffer to be returned to frameworks.
900 *               NULL if metadata.
 *   @frame_number : frame number of the request the buffer belongs to
901 *
902 * RETURN     : NONE
903 *==========================================================================*/
904void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
905                camera3_stream_buffer_t *buffer, uint32_t frame_number)
906{
907    pthread_mutex_lock(&mRequestLock);
908
909    if (metadata_buf) {
910        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
911        int32_t frame_number_valid = *(int32_t *)
912            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
913        uint32_t frame_number = *(uint32_t *)
914            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
915        const struct timeval *tv = (const struct timeval *)
916            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
917        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
918            tv->tv_usec * NSEC_PER_USEC;
919
920        if (!frame_number_valid) {
921            ALOGD("%s: Not a valid frame number, used as SOF only", __func__);
922            mMetadataChannel->bufDone(metadata_buf);
923            goto done_metadata;
924        }
925        ALOGD("%s: valid frame_number = %d, capture_time = %lld", __func__,
926                frame_number, capture_time);
927
928        // Go through the pending requests info and send shutter/results to frameworks
929        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
930                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
931            camera3_capture_result_t result;
932            camera3_notify_msg_t notify_msg;
933            ALOGD("%s: frame_number in the list is %d", __func__, i->frame_number);
934
935            // Flush out all entries with less or equal frame numbers.
936
937            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
938            //Right now it's the same as metadata timestamp
939
940            //TODO: When there is metadata drop, how do we derive the timestamp of
941            //dropped frames? For now, we fake the dropped timestamp by subtracting
942            //from the reported timestamp
943            nsecs_t current_capture_time = capture_time -
944                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
945
946            // Send shutter notify to frameworks
947            notify_msg.type = CAMERA3_MSG_SHUTTER;
948            notify_msg.message.shutter.frame_number = i->frame_number;
949            notify_msg.message.shutter.timestamp = current_capture_time;
950            mCallbackOps->notify(mCallbackOps, &notify_msg);
951            ALOGD("%s: notify frame_number = %d, capture_time = %lld", __func__,
952                    i->frame_number, capture_time);
953
954            // Send empty metadata with already filled buffers for dropped metadata
955            // and send valid metadata with already filled buffers for current metadata
956            if (i->frame_number < frame_number) {
957                CameraMetadata emptyMetadata(1, 0);
958                emptyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
959                        &current_capture_time, 1);
960                emptyMetadata.update(ANDROID_REQUEST_ID,
961                        &(i->request_id), 1);
962                result.result = emptyMetadata.release();
963            } else {
964                result.result = translateCbMetadataToResultMetadata(metadata,
965                        current_capture_time, i->request_id);
966                // Return metadata buffer
967                mMetadataChannel->bufDone(metadata_buf);
968            }
969            if (!result.result) {
970                ALOGE("%s: metadata is NULL", __func__);
971            }
972            result.frame_number = i->frame_number;
973            result.num_output_buffers = 0;
974            result.output_buffers = NULL;
975            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
976                    j != i->buffers.end(); j++) {
977                if (j->buffer) {
978                    result.num_output_buffers++;
979                }
980            }
981
982            if (result.num_output_buffers > 0) {
983                camera3_stream_buffer_t *result_buffers =
984                    new camera3_stream_buffer_t[result.num_output_buffers];
985                if (!result_buffers) {
986                    ALOGE("%s: Fatal error: out of memory", __func__);
987                }
988                size_t result_buffers_idx = 0;
989                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
990                        j != i->buffers.end(); j++) {
991                    if (j->buffer) {
992                        result_buffers[result_buffers_idx++] = *(j->buffer);
993                        free(j->buffer);
994                        mPendingBuffersMap.editValueFor(j->stream)--;
995                    }
996                }
997                result.output_buffers = result_buffers;
998
999                mCallbackOps->process_capture_result(mCallbackOps, &result);
1000                ALOGD("%s: meta frame_number = %d, capture_time = %lld",
1001                        __func__, result.frame_number, current_capture_time);
1002                free_camera_metadata((camera_metadata_t *)result.result);
1003                delete[] result_buffers;
1004            } else {
1005                mCallbackOps->process_capture_result(mCallbackOps, &result);
1006                ALOGD("%s: meta frame_number = %d, capture_time = %lld",
1007                        __func__, result.frame_number, current_capture_time);
1008                free_camera_metadata((camera_metadata_t *)result.result);
1009            }
1010            // erase the element from the list
1011            i = mPendingRequestsList.erase(i);
1012        }
1013
1014
1015done_metadata:
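        // Unblock processCaptureRequest() only while every stream still has at
        // least one buffer available (i.e. no stream has max_buffers outstanding).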
1016        bool max_buffers_dequeued = false;
1017        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
1018            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
1019            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
1020            if (queued_buffers == stream->max_buffers) {
1021                max_buffers_dequeued = true;
1022                break;
1023            }
1024        }
1025        if (!max_buffers_dequeued) {
1026            // Unblock process_capture_request
1027            mPendingRequest = 0;
1028            pthread_cond_signal(&mRequestCond);
1029        }
1030    } else {
1031        // If the frame number doesn't exist in the pending request list,
1032        // directly send the buffer to the frameworks, and update pending buffers map
1033        // Otherwise, book-keep the buffer.
1034        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1035        while (i != mPendingRequestsList.end() && i->frame_number != frame_number)
1036            i++;
1037        if (i == mPendingRequestsList.end()) {
1038            // Verify all pending requests frame_numbers are greater
1039            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1040                    j != mPendingRequestsList.end(); j++) {
1041                if (j->frame_number < frame_number) {
1042                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
1043                            __func__, j->frame_number, frame_number);
1044                }
1045            }
1046            camera3_capture_result_t result;
1047            result.result = NULL;
1048            result.frame_number = frame_number;
1049            result.num_output_buffers = 1;
1050            result.output_buffers = buffer;
1051            ALOGD("%s: result frame_number = %d, buffer = %p",
1052                    __func__, frame_number, buffer);
1053            mPendingBuffersMap.editValueFor(buffer->stream)--;
1054            mCallbackOps->process_capture_result(mCallbackOps, &result);
1055        } else {
1056            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1057                    j != i->buffers.end(); j++) {
1058                if (j->stream == buffer->stream) {
1059                    if (j->buffer != NULL) {
1060                        ALOGE("%s: Error: buffer is already set", __func__);
1061                    } else {
1062                        j->buffer = (camera3_stream_buffer_t *)malloc(
1063                                sizeof(camera3_stream_buffer_t));
1064                        *(j->buffer) = *buffer;
1065                        ALOGD("%s: cache buffer %p at result frame_number %d",
1066                                __func__, buffer, frame_number);
1067                    }
1068                }
1069            }
1070        }
1071    }
1072
1073    pthread_mutex_unlock(&mRequestLock);
1074    return;
1075}
1076
1077/*===========================================================================
1078 * FUNCTION   : translateCbMetadataToResultMetadata
1079 *
1080 * DESCRIPTION:
1081 *
1082 * PARAMETERS :
1083 *   @metadata : metadata information from callback
1084 *
1085 * RETURN     : camera_metadata_t*
1086 *              metadata in a format specified by fwk
1087 *==========================================================================*/
1088camera_metadata_t*
1089QCamera3HardwareInterface::translateCbMetadataToResultMetadata
1090                                (metadata_buffer_t *metadata, nsecs_t timestamp,
1091                                 int32_t request_id)
1092{
1093    CameraMetadata camMetadata;
1094    camera_metadata_t* resultMetadata;
1095
1096
1097    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1098    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1099
1100    /*CAM_INTF_META_HISTOGRAM - TODO*/
1101    /*cam_hist_stats_t  *histogram =
1102      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
1103      metadata);*/
1104
1105    /*face detection*/
1106    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
1107        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1108    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1109    int32_t faceIds[numFaces];
1110    uint8_t faceScores[numFaces];
1111    int32_t faceRectangles[numFaces * 4];
1112    int32_t faceLandmarks[numFaces * 6];
1113    int j = 0, k = 0;
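    // Pack per-face results: 4 ints per face for the bounding rectangle and
    // 6 ints per face for the landmarks (left eye, right eye and mouth centers).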
1114    for (int i = 0; i < numFaces; i++) {
1115        faceIds[i] = faceDetectionInfo->faces[i].face_id;
1116        faceScores[i] = faceDetectionInfo->faces[i].score;
1117        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1118                faceRectangles+j, -1);
1119        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1120        j+= 4;
1121        k+= 6;
1122    }
1123    camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1124    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1125    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1126            faceRectangles, numFaces*4);
1127    camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1128            faceLandmarks, numFaces*6);
1129
1130
1131    /*autofocus - TODO*/
1132    /*cam_auto_focus_data_t  *afData =(cam_auto_focus_data_t *)
1133      POINTER_OF(CAM_INTF_META_AUTOFOCUS_DATA,metadata);*/
1134
1135    uint8_t  *color_correct_mode =
1136        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
1137    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
1138
1139    int32_t  *ae_precapture_id =
1140        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
1141    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
1142
1143    /*aec regions*/
1144    cam_area_t  *hAeRegions =
1145        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
1146    int32_t aeRegions[5];
1147    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
1148    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
1149
1150    uint8_t  *ae_state =
1151        (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
1152    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
1153
1154    uint8_t  *focusMode =
1155        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
1156    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
1157
1158    /*af regions*/
1159    cam_area_t  *hAfRegions =
1160        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
1161    int32_t afRegions[5];
1162    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
1163    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
1164
1165    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
1166    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
1167
1168    int32_t  *afTriggerId =
1169        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
1170    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
1171
1172    uint8_t  *whiteBalance =
1173        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
1174    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
1175
1176    /*awb regions*/
1177    cam_area_t  *hAwbRegions =
1178        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
1179    int32_t awbRegions[5];
1180    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
1181    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
1182
1183    uint8_t  *whiteBalanceState =
1184        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
1185    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
1186
1187    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
1188    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
1189
1190    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
1191    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
1192
1193    uint8_t  *flashPower =
1194        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
1195    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
1196
1197    int64_t  *flashFiringTime =
1198        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
1199    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
1200
1201    /*int32_t  *ledMode =
1202      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
1203      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
1204
1205    uint8_t  *flashState =
1206        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
1207    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
1208
1209    uint8_t  *hotPixelMode =
1210        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
1211    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
1212
1213    float  *lensAperture =
1214        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
1215    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
1216
1217    float  *filterDensity =
1218        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
1219    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
1220
1221    float  *focalLength =
1222        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1223    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
1224
1225    float  *focusDistance =
1226        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
1227    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
1228
1229    float  *focusRange =
1230        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
1231    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
1232
1233    uint8_t  *opticalStab =
1234        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
1235    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
1236
1237    /*int32_t  *focusState =
1238      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
1239      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
1240
1241    uint8_t  *noiseRedMode =
1242        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
1243    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
1244
1245    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
1246
1247    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
1248        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
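    // ANDROID_SCALER_CROP_REGION is reported as (left, top, width, height).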
1249    int32_t scalerCropRegion[4];
1250    scalerCropRegion[0] = hScalerCropRegion->left;
1251    scalerCropRegion[1] = hScalerCropRegion->top;
1252    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height; // height assumed to mirror left/top/width in cam_crop_region_t
1253    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
1254
1255    int64_t  *sensorExpTime =
1256        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1257    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
1258
1259    int64_t  *sensorFrameDuration =
1260        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
1261    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
1262
1263    int32_t  *sensorSensitivity =
1264        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1265    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
1266
1267    uint8_t  *shadingMode =
1268        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
1269    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
1270
1271    uint8_t  *faceDetectMode =
1272        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
1273    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
1274
1275    uint8_t  *histogramMode =
1276        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
1277    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
1278
1279    uint8_t  *sharpnessMapMode =
1280        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
1281    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
1282            sharpnessMapMode, 1);
1283
1284    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
1285    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
1286        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
1287    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
1288            (int32_t*)sharpnessMap->sharpness,
1289            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
1290
1291    resultMetadata = camMetadata.release();
1292    return resultMetadata;
1293}
1294
1295/*===========================================================================
1296 * FUNCTION   : convertToRegions
1297 *
1298 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1299 *
1300 * PARAMETERS :
1301 *   @rect   : cam_rect_t struct to convert
1302 *   @region : int32_t destination array
1303 *   @weight : if we are converting from cam_area_t, weight is valid
1304 *             else weight = -1
1305 *
1306 *==========================================================================*/
1307void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1308    region[0] = rect.left;
1309    region[1] = rect.top;
1310    region[2] = rect.left + rect.width;
1311    region[3] = rect.top + rect.height;
1312    if (weight > -1) {
1313        region[4] = weight;
1314    }
1315}
1316
1317/*===========================================================================
1318 * FUNCTION   : convertFromRegions
1319 *
1320 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
1321 *
1322 * PARAMETERS :
1323 *   @roi      : destination cam_area_t struct
1324 *   @settings : frame settings containing the region entry
1325 *   @tag      : metadata tag of the region ([xmin, ymin, xmax, ymax, weight])
1326 *               to convert
1327 *
1328 *==========================================================================*/
1329void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1330                                                   const camera_metadata_t *settings,
1331                                                   uint32_t tag){
1332    CameraMetadata frame_settings;
1333    frame_settings = settings;
1334    int32_t x_min = frame_settings.find(tag).data.i32[0];
1335    int32_t y_min = frame_settings.find(tag).data.i32[1];
1336    int32_t x_max = frame_settings.find(tag).data.i32[2];
1337    int32_t y_max = frame_settings.find(tag).data.i32[3];
1338    roi->weight = frame_settings.find(tag).data.i32[4];
1339    roi->rect.left = x_min;
1340    roi->rect.top = y_min;
1341    roi->rect.width = x_max - x_min;
1342    roi->rect.height = y_max - y_min;
1343}
1344
1345/*===========================================================================
1346 * FUNCTION   : convertLandmarks
1347 *
1348 * DESCRIPTION: helper method to extract the landmarks from face detection info
1349 *
1350 * PARAMETERS :
1351 *   @face   : cam_face_detection_info_t struct to read landmarks from
1352 *   @landmarks : int32_t destination array
1353 *
1354 *
1355 *==========================================================================*/
1356void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1357{
1358    landmarks[0] = face.left_eye_center.x;
1359    landmarks[1] = face.left_eye_center.y;
1360    landmarks[2] = face.right_eye_center.x;
1361    landmarks[3] = face.right_eye_center.y;
1362    landmarks[4] = face.mouth_center.x;
1363    landmarks[5] = face.mouth_center.y;
1364}
1365
1366#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1367/*===========================================================================
1368 * FUNCTION   : initCapabilities
1369 *
1370 * DESCRIPTION: initialize camera capabilities in static data struct
1371 *
1372 * PARAMETERS :
1373 *   @cameraId  : camera Id
1374 *
1375 * RETURN     : int32_t type of status
1376 *              NO_ERROR  -- success
1377 *              non-zero failure code
1378 *==========================================================================*/
1379int QCamera3HardwareInterface::initCapabilities(int cameraId)
1380{
1381    int rc = 0;
1382    mm_camera_vtbl_t *cameraHandle = NULL;
1383    QCamera3HeapMemory *capabilityHeap = NULL;
1384
1385    cameraHandle = camera_open(cameraId);
1386    if (!cameraHandle) {
1387        ALOGE("%s: camera_open failed", __func__);
1388        rc = -1;
1389        goto open_failed;
1390    }
1391
1392    capabilityHeap = new QCamera3HeapMemory();
1393    if (capabilityHeap == NULL) {
1394        ALOGE("%s: creation of capabilityHeap failed", __func__);
1395        goto heap_creation_failed;
1396    }
1397    /* Allocate memory for capability buffer */
1398    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1399    if(rc != OK) {
1400        ALOGE("%s: No memory for cappability", __func__);
1401        goto allocate_failed;
1402    }
1403
1404    /* Map memory for capability buffer */
1405    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1406    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1407                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
1408                                capabilityHeap->getFd(0),
1409                                sizeof(cam_capability_t));
1410    if(rc < 0) {
1411        ALOGE("%s: failed to map capability buffer", __func__);
1412        goto map_failed;
1413    }
1414
1415    /* Query Capability */
1416    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1417    if(rc < 0) {
1418        ALOGE("%s: failed to query capability",__func__);
1419        goto query_failed;
1420    }
1421    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1422    if (!gCamCapability[cameraId]) {
1423        ALOGE("%s: out of memory", __func__);
1424        goto query_failed;
1425    }
1426    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1427                                        sizeof(cam_capability_t));
1428    rc = 0;
1429
1430query_failed:
1431    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1432                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
1433map_failed:
1434    capabilityHeap->deallocate();
1435allocate_failed:
1436    delete capabilityHeap;
1437heap_creation_failed:
1438    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1439    cameraHandle = NULL;
1440open_failed:
1441    return rc;
1442}
1443
1444/*===========================================================================
1445 * FUNCTION   : initParameters
1446 *
1447 * DESCRIPTION: initialize camera parameters
1448 *
1449 * PARAMETERS :
1450 *
1451 * RETURN     : int32_t type of status
1452 *              NO_ERROR  -- success
1453 *              non-zero failure code
1454 *==========================================================================*/
1455int QCamera3HardwareInterface::initParameters()
1456{
1457    int rc = 0;
1458
1459    //Allocate Set Param Buffer
1460    mParamHeap = new QCamera3HeapMemory();
1461    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1462    if(rc != OK) {
1463        rc = NO_MEMORY;
1464        ALOGE("Failed to allocate SETPARM Heap memory");
1465        delete mParamHeap;
1466        mParamHeap = NULL;
1467        return rc;
1468    }
1469
1470    //Map memory for parameters buffer
1471    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1472            CAM_MAPPING_BUF_TYPE_PARM_BUF,
1473            mParamHeap->getFd(0),
1474            sizeof(parm_buffer_t));
1475    if(rc < 0) {
1476        ALOGE("%s:failed to map SETPARM buffer",__func__);
1477        rc = FAILED_TRANSACTION;
1478        mParamHeap->deallocate();
1479        delete mParamHeap;
1480        mParamHeap = NULL;
1481        return rc;
1482    }
1483
1484    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1485    return rc;
1486}
1487
1488/*===========================================================================
1489 * FUNCTION   : deinitParameters
1490 *
1491 * DESCRIPTION: de-initialize camera parameters
1492 *
1493 * PARAMETERS :
1494 *
1495 * RETURN     : NONE
1496 *==========================================================================*/
1497void QCamera3HardwareInterface::deinitParameters()
1498{
1499    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
1500            CAM_MAPPING_BUF_TYPE_PARM_BUF);
1501
1502    mParamHeap->deallocate();
1503    delete mParamHeap;
1504    mParamHeap = NULL;
1505
1506    mParameters = NULL;
1507}
1508
1509/*===========================================================================
1510 * FUNCTION   : calcMaxJpegSize
1511 *
1512 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1513 *
1514 * PARAMETERS :
1515 *
1516 * RETURN     : max_jpeg_size
1517 *==========================================================================*/
1518int QCamera3HardwareInterface::calcMaxJpegSize()
1519{
1520    int32_t max_jpeg_size = 0;
1521    int temp_width, temp_height;
1522    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1523        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1524        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1525        if (temp_width * temp_height > max_jpeg_size ) {
1526            max_jpeg_size = temp_width * temp_height;
1527        }
1528    }
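    /* Worst-case sizing assumption (not stated in the original code): the JPEG
     * is bounded by an uncompressed YUV420 frame (w * h * 3/2) plus the
     * trailing camera3_jpeg_blob_t header. */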
1529    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1530    return max_jpeg_size;
1531}
1532
1533/*===========================================================================
1534 * FUNCTION   : initStaticMetadata
1535 *
1536 * DESCRIPTION: initialize the static metadata
1537 *
1538 * PARAMETERS :
1539 *   @cameraId  : camera Id
1540 *
1541 * RETURN     : int32_t type of status
1542 *              0  -- success
1543 *              non-zero failure code
1544 *==========================================================================*/
1545int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
1546{
1547    int rc = 0;
1548    CameraMetadata staticInfo;
1549    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
1550    /*HAL 3 only*/
1551    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1552                    &gCamCapability[cameraId]->min_focus_distance, 1); */
1553
1554    /*hard coded for now but this should come from sensor*/
1555    float min_focus_distance;
1556    if(facingBack){
1557        min_focus_distance = 10;
1558    } else {
1559        min_focus_distance = 0;
1560    }
1561    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1562                    &min_focus_distance, 1);
1563
1564    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1565                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
1566
1567    /*should be using focal lengths but sensor doesn't provide that info now*/
1568    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1569                      &gCamCapability[cameraId]->focal_length,
1570                      1);
1571
1572    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1573                      gCamCapability[cameraId]->apertures,
1574                      gCamCapability[cameraId]->apertures_count);
1575
1576    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1577                gCamCapability[cameraId]->filter_densities,
1578                gCamCapability[cameraId]->filter_densities_count);
1579
1580
1581    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1582                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
1583                      gCamCapability[cameraId]->optical_stab_modes_count);
1584
1585    staticInfo.update(ANDROID_LENS_POSITION,
1586                      gCamCapability[cameraId]->lens_position,
1587                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
1588
1589    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
1590                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
1591    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
1592                      lens_shading_map_size,
1593                      sizeof(lens_shading_map_size)/sizeof(int32_t));
1594
1595    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map,
1596            sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float));
1597
1598    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
1599                                                      gCamCapability[cameraId]->geo_correction_map_size.height};
1600    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
1601            geo_correction_map_size,
1602            sizeof(geo_correction_map_size)/sizeof(int32_t));
1603
1604    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
1605                       gCamCapability[cameraId]->geo_correction_map,
1606                       sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
1607
1608    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1609            gCamCapability[cameraId]->sensor_physical_size, 2);
1610
1611    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1612            gCamCapability[cameraId]->exposure_time_range, 2);
1613
1614    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1615            &gCamCapability[cameraId]->max_frame_duration, 1);
1616
1617
1618    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1619                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
1620
1621    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
1622                                               gCamCapability[cameraId]->pixel_array_size.height};
1623    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1624                      pixel_array_size, 2);
1625
1626    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.width,
1627                                                gCamCapability[cameraId]->active_array_size.height};
1628
1629    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1630                      active_array_size, 2);
1631
1632    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1633            &gCamCapability[cameraId]->white_level, 1);
1634
1635    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1636            gCamCapability[cameraId]->black_level_pattern, 4);
1637
1638    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
1639                      &gCamCapability[cameraId]->flash_charge_duration, 1);
1640
1641    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
1642                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
1643
1644    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1645                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
1646    /*hardcode 0 for now*/
1647    int32_t max_face_count = 0;
1648    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1649                      &max_face_count, 1);
1650
1651    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1652                      &gCamCapability[cameraId]->histogram_size, 1);
1653
1654    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1655            &gCamCapability[cameraId]->max_histogram_count, 1);
1656
1657    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
1658                                                gCamCapability[cameraId]->sharpness_map_size.height};
1659
1660    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1661            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
1662
1663    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1664            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
1665
1666
1667    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1668                      &gCamCapability[cameraId]->raw_min_duration,
1669                       1);
1670
1671    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888};
1672    int scalar_formats_count = 1;
1673    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1674                      scalar_formats,
1675                      scalar_formats_count);
1676
1677    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
1678    makeTable(gCamCapability[cameraId]->supported_sizes_tbl,
1679              gCamCapability[cameraId]->supported_sizes_tbl_cnt,
1680              available_processed_sizes);
1681    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1682                available_processed_sizes,
1683                (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2);
1684
1685    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
1686    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
1687                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
1688                 available_fps_ranges);
1689    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1690            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
1691
1692    camera_metadata_rational exposureCompensationStep = {
1693            gCamCapability[cameraId]->exp_compensation_step.numerator,
1694            gCamCapability[cameraId]->exp_compensation_step.denominator};
1695    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1696                      &exposureCompensationStep, 1);
1697
1698    /*TO DO*/
1699    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1700    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1701                      availableVstabModes, sizeof(availableVstabModes));
1702
1703    /*HAL 1 and HAL 3 common*/
1704    float maxZoom = 10;
1705    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1706            &maxZoom, 1);
1707
1708    int32_t max3aRegions = 1;
1709    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
1710            &max3aRegions, 1);
1711
1712    uint8_t availableFaceDetectModes[] = {
1713            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
1714    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1715                      availableFaceDetectModes,
1716                      sizeof(availableFaceDetectModes));
1717
1718    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
1719                                       gCamCapability[cameraId]->raw_dim.height};
1720    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1721                      raw_size,
1722                      sizeof(raw_size)/sizeof(uint32_t));
1723
1724    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
1725                                                        gCamCapability[cameraId]->exposure_compensation_max};
1726    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1727            exposureCompensationRange,
1728            sizeof(exposureCompensationRange)/sizeof(int32_t));
1729
1730    uint8_t lensFacing = (facingBack) ?
1731            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1732    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
1733
1734    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
1735    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
1736              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
1737              available_jpeg_sizes);
1738    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1739                available_jpeg_sizes,
1740                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
1741
1742    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1743                      available_thumbnail_sizes,
1744                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
1745
1746    int32_t max_jpeg_size = 0;
1747    int temp_width, temp_height;
1748    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
1749        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
1750        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
1751        if (temp_width * temp_height > max_jpeg_size ) {
1752            max_jpeg_size = temp_width * temp_height;
1753        }
1754    }
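    /* Same worst-case sizing as calcMaxJpegSize(): uncompressed YUV420 frame
     * size (w * h * 3/2) plus the camera3_jpeg_blob_t header. */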
1755    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1756    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
1757                      &max_jpeg_size, 1);
1758
1759    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
1760    int32_t size = 0;
1761    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
1762        int val = lookupFwkName(EFFECT_MODES_MAP,
1763                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
1764                                   gCamCapability[cameraId]->supported_effects[i]);
1765        if (val != NAME_NOT_FOUND) {
1766            avail_effects[size] = (uint8_t)val;
1767            size++;
1768        }
1769    }
1770    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1771                      avail_effects,
1772                      size);
1773
1774    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
1775    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
1776    int32_t supported_scene_modes_cnt = 0;
1777    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
1778        int val = lookupFwkName(SCENE_MODES_MAP,
1779                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
1780                                gCamCapability[cameraId]->supported_scene_modes[i]);
1781        if (val != NAME_NOT_FOUND) {
1782            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
1783            supported_indexes[supported_scene_modes_cnt] = i;
1784            supported_scene_modes_cnt++;
1785        }
1786    }
1787
1788    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1789                      avail_scene_modes,
1790                      supported_scene_modes_cnt);
1791
1792    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
1793    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
1794                      supported_scene_modes_cnt,
1795                      scene_mode_overrides,
1796                      supported_indexes);
1797    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
1798                      scene_mode_overrides,
1799                      supported_scene_modes_cnt*3);
1800
1801    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
1802    size = 0;
1803    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
1804        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
1805                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
1806                                 gCamCapability[cameraId]->supported_antibandings[i]);
1807        if (val != NAME_NOT_FOUND) {
1808            avail_antibanding_modes[size] = (uint8_t)val;
1809            size++;
1810        }
1811
1812    }
1813    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1814                      avail_antibanding_modes,
1815                      size);
1816
1817    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
1818    size = 0;
1819    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
1820        int val = lookupFwkName(FOCUS_MODES_MAP,
1821                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
1822                                gCamCapability[cameraId]->supported_focus_modes[i]);
1823        if (val != NAME_NOT_FOUND) {
1824            avail_af_modes[size] = (uint8_t)val;
1825            size++;
1826        }
1827    }
1828    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1829                      avail_af_modes,
1830                      size);
1831
1832    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
1833    size = 0;
1834    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
1835        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
1836                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
1837                                    gCamCapability[cameraId]->supported_white_balances[i]);
1838        if (val != NAME_NOT_FOUND) {
1839            avail_awb_modes[size] = (uint8_t)val;
1840            size++;
1841        }
1842    }
1843    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1844                      avail_awb_modes,
1845                      size);
1846
1847    uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX];
1848    size = 0;
1849    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) {
1850        int val = lookupFwkName(FLASH_MODES_MAP,
1851                                sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
1852                                gCamCapability[cameraId]->supported_flash_modes[i]);
1853        if (val != NAME_NOT_FOUND) {
1854            avail_flash_modes[size] = (uint8_t)val;
1855            size++;
1856        }
1857    }
1858    uint8_t flashAvailable = 0; /* per-camera value, must not persist across calls */
1859    if (size > 1) {
1860        //flash is supported
1861        flashAvailable = 1;
1862    }
1863    staticInfo.update(ANDROID_FLASH_MODE,
1864                      avail_flash_modes,
1865                      size);
1866
1867    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
1868            &flashAvailable, 1);
1869
1870    uint8_t avail_ae_modes[5];
1871    size = 0;
1872    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
1873        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
1874        size++;
1875    }
1876    if (flashAvailable) {
1877        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
1878        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
1879        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
1880    }
1881    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1882                      avail_ae_modes,
1883                      size);
1884
1885    gStaticMetadata[cameraId] = staticInfo.release();
1886    return rc;
1887}
1888
1889/*===========================================================================
1890 * FUNCTION   : makeTable
1891 *
1892 * DESCRIPTION: make a table of sizes
1893 *
1894 * PARAMETERS :
1895 *
1896 *
1897 *==========================================================================*/
1898void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
1899                                          int32_t* sizeTable)
1900{
1901    int j = 0;
1902    for (int i = 0; i < size; i++) {
1903        sizeTable[j] = dimTable[i].width;
1904        sizeTable[j+1] = dimTable[i].height;
1905        j+=2;
1906    }
1907}
1908
1909/*===========================================================================
1910 * FUNCTION   : makeFPSTable
1911 *
1912 * DESCRIPTION: make a table of fps ranges
1913 *
1914 * PARAMETERS :
1915 *
1916 *==========================================================================*/
1917void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
1918                                          int32_t* fpsRangesTable)
1919{
1920    int j = 0;
1921    for (int i = 0; i < size; i++) {
1922        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
1923        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
1924        j+=2;
1925    }
1926}
1927
1928/*===========================================================================
1929 * FUNCTION   : makeOverridesList
1930 *
1931 * DESCRIPTION: make a list of scene mode overrides
1932 *
1933 * PARAMETERS :
1934 *
1935 *
1936 *==========================================================================*/
1937void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
1938                                                  uint8_t size, uint8_t* overridesList,
1939                                                  uint8_t* supported_indexes)
1940{
1941    /*daemon will give a list of overrides for all scene modes.
1942      However we should send the fwk only the overrides for the scene modes
1943      supported by the framework*/
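    /* Illustrative example (not in the original source): if the framework
     * supports only the scene modes at backend indexes {1, 4}, just
     * overridesTable[1] and overridesTable[4] are copied, emitting three
     * bytes (ae, awb, af) per supported scene mode into overridesList. */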
1944    int j = 0, index = 0;
1945    for (int i = 0; i < size; i++) {
1946        index = supported_indexes[i];
1947        overridesList[j] = (uint8_t)overridesTable[index].ae_mode;
1948        overridesList[j+1] = (uint8_t)overridesTable[index].awb_mode;
1949        overridesList[j+2] = (uint8_t)overridesTable[index].af_mode;
1950        j+=3;
1951    }
1952}
1953
1954/*===========================================================================
1955 * FUNCTION   : getScalarFormat
1956 *
1957 * DESCRIPTION: convert the backend format to a type recognized by the framework
1958 *
1959 * PARAMETERS : @format : the format from the backend
1960 *
1961 * RETURN     : format recognized by the framework
1962 *
1963 *==========================================================================*/
1964int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
1965{
1966    int32_t halPixelFormat;
1967
1968    switch (format) {
1969    case CAM_FORMAT_YUV_420_NV12:
1970        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
1971        break;
1972    case CAM_FORMAT_YUV_420_NV21:
1973        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
1974        break;
1975    case CAM_FORMAT_YUV_420_NV21_ADRENO:
1976        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
1977        break;
1978    case CAM_FORMAT_YUV_420_YV12:
1979        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
1980        break;
1981    case CAM_FORMAT_YUV_422_NV16:
1982    case CAM_FORMAT_YUV_422_NV61:
1983    default:
1984        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
1985        break;
1986    }
1987    return halPixelFormat;
1988}
1989
1990/*===========================================================================
1991 * FUNCTION   : AddSetParmEntryToBatch
1992 *
1993 * DESCRIPTION: add set parameter entry into batch
1994 *
1995 * PARAMETERS :
1996 *   @p_table     : ptr to parameter buffer
1997 *   @paramType   : parameter type
1998 *   @paramLength : length of parameter value
1999 *   @paramValue  : ptr to parameter value
2000 *
2001 * RETURN     : int32_t type of status
2002 *              NO_ERROR  -- success
2003 *              non-zero failure code
2004 *==========================================================================*/
2005int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2006                                                          cam_intf_parm_type_t paramType,
2007                                                          uint32_t paramLength,
2008                                                          void *paramValue)
2009{
2010    int position = paramType;
2011    int current, next;
2012
2013    /*************************************************************************
2014    *                 Code to take care of linking next flags                *
2015    *************************************************************************/
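    /* Illustrative example (not in the original source): if the flagged list
     * currently links entries {3 -> 7} and paramType is 5, the walk below
     * stops at 3 and relinks the list as {3 -> 5 -> 7}; the first entry is
     * only replaced when paramType sorts before it. */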
2016    current = GET_FIRST_PARAM_ID(p_table);
2017    if (position == current){
2018        //DO NOTHING
2019    } else if (position < current){
2020        SET_NEXT_PARAM_ID(position, p_table, current);
2021        SET_FIRST_PARAM_ID(p_table, position);
2022    } else {
2023        /* Search for the position in the linked list where we need to slot in*/
2024        while (position > GET_NEXT_PARAM_ID(current, p_table))
2025            current = GET_NEXT_PARAM_ID(current, p_table);
2026
2027        /*If node already exists no need to alter linking*/
2028        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2029            next = GET_NEXT_PARAM_ID(current, p_table);
2030            SET_NEXT_PARAM_ID(current, p_table, position);
2031            SET_NEXT_PARAM_ID(position, p_table, next);
2032        }
2033    }
2034
2035    /*************************************************************************
2036    *                   Copy contents into entry                             *
2037    *************************************************************************/
2038
2039    if (paramLength > sizeof(parm_type_t)) {
2040        ALOGE("%s:Size of input larger than max entry size",__func__);
2041        return BAD_VALUE;
2042    }
2043    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2044    return NO_ERROR;
2045}
2046
2047/*===========================================================================
2048 * FUNCTION   : lookupFwkName
2049 *
2050 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
2051 *              make sure the parameter is correctly propagated
2052 *
2053 * PARAMETERS  :
2054 *   @arr      : map between the two enums
2055 *   @len      : len of the map
2056 *   @hal_name : name of the hal_parm to map
2057 *
2058 * RETURN     : int type of status
2059 *              fwk_name  -- success
2060 *              NAME_NOT_FOUND -- failure
2061 *==========================================================================*/
2062int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2063                                             int len, int hal_name)
2064{
2065
2066    for (int i = 0; i < len; i++) {
2067        if (arr[i].hal_name == hal_name)
2068            return arr[i].fwk_name;
2069    }
2070
2071    /* Not able to find matching framework type is not necessarily
2072     * an error case. This happens when mm-camera supports more attributes
2073     * than the frameworks do */
2074    ALOGD("%s: Cannot find matching framework type", __func__);
2075    return NAME_NOT_FOUND;
2076}
2077
2078/*===========================================================================
2079 * FUNCTION   : lookupHalName
2080 *
2081 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
2082 *              make sure the parameter is correctly propagated
2083 *
2084 * PARAMETERS  :
2085 *   @arr      : map between the two enums
2086 *   @len      : len of the map
2087 *   @fwk_name : name of the fwk parameter to map
2088 *
2089 * RETURN     : int32_t type of status
2090 *              hal_name  -- success
2091 *              NAME_NOT_FOUND -- failure
2092 *==========================================================================*/
2093int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2094                                             int len, int fwk_name)
2095{
2096    for (int i = 0; i < len; i++) {
2097       if (arr[i].fwk_name == fwk_name)
2098           return arr[i].hal_name;
2099    }
2100    ALOGE("%s: Cannot find matching hal type", __func__);
2101    return NAME_NOT_FOUND;
2102}
2103
2104/*===========================================================================
2105 * FUNCTION   : getCapabilities
2106 *
2107 * DESCRIPTION: query camera capabilities
2108 *
2109 * PARAMETERS :
2110 *   @cameraId  : camera Id
2111 *   @info      : camera info struct to be filled in with camera capabilities
2112 *
2113 * RETURN     : int32_t type of status
2114 *              NO_ERROR  -- success
2115 *              non-zero failure code
2116 *==========================================================================*/
2117int QCamera3HardwareInterface::getCamInfo(int cameraId,
2118                                    struct camera_info *info)
2119{
2120    int rc = 0;
2121
2122    if (NULL == gCamCapability[cameraId]) {
2123        rc = initCapabilities(cameraId);
2124        if (rc < 0) {
2125            //pthread_mutex_unlock(&g_camlock);
2126            return rc;
2127        }
2128    }
2129
2130    if (NULL == gStaticMetadata[cameraId]) {
2131        rc = initStaticMetadata(cameraId);
2132        if (rc < 0) {
2133            return rc;
2134        }
2135    }
2136
2137    switch(gCamCapability[cameraId]->position) {
2138    case CAM_POSITION_BACK:
2139        info->facing = CAMERA_FACING_BACK;
2140        break;
2141
2142    case CAM_POSITION_FRONT:
2143        info->facing = CAMERA_FACING_FRONT;
2144        break;
2145
2146    default:
2147        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2148        rc = -1;
2149        break;
2150    }
2151
2152
2153    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2154    info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2155    info->static_camera_characteristics = gStaticMetadata[cameraId];
2156
2157    return rc;
2158}
2159
2160/*===========================================================================
2161 * FUNCTION   : translateCapabilityToMetadata
2162 *
2163 * DESCRIPTION: translate the capability into default camera_metadata_t settings
2164 *
2165 * PARAMETERS : @type : type of the request template
2166 *
2167 *
2168 * RETURN     : success: camera_metadata_t*
2169 *              failure: NULL
2170 *
2171 *==========================================================================*/
2172camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2173{
2174    pthread_mutex_lock(&mMutex);
2175
2176    if (mDefaultMetadata[type] != NULL) {
2177        pthread_mutex_unlock(&mMutex);
2178        return mDefaultMetadata[type];
2179    }
2180    //first time we are handling this request
2181    //fill up the metadata structure using the wrapper class
2182    CameraMetadata settings;
2183    //translate from cam_capability_t to camera_metadata_tag_t
2184    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2185    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2186
2187    /*control*/
2188
2189    uint8_t controlIntent = 0;
2190    switch (type) {
2191      case CAMERA3_TEMPLATE_PREVIEW:
2192        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2193        break;
2194      case CAMERA3_TEMPLATE_STILL_CAPTURE:
2195        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2196        break;
2197      case CAMERA3_TEMPLATE_VIDEO_RECORD:
2198        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2199        break;
2200      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2201        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2202        break;
2203      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2204        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2205        break;
2206      default:
2207        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2208        break;
2209    }
2210    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2211
2212    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2213            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2214
2215    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2216    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2217
2218    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2219    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2220
2221    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2222    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2223
2224    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2225    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2226
2227    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2228    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2229
2230    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2231    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2232
2233    static uint8_t focusMode;
2234    if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2235        ALOGE("%s: Setting focus mode to auto", __func__);
2236        focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2237    } else {
2238        ALOGE("%s: Setting focus mode to off", __func__);
2239        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2240    }
2241    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2242
2243    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2244    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2245
2246    /*flash*/
2247    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2248    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2249
2250
2251    /* lens */
2252    float default_aperture = gCamCapability[mCameraId]->apertures[0];
2253    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2254
2255    if (gCamCapability[mCameraId]->filter_densities_count) {
2256        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2257        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2258                        gCamCapability[mCameraId]->filter_densities_count);
2259    }
2260
2261    /* TODO: Enable focus lengths once supported*/
2262    /*if (gCamCapability[mCameraId]->focal_lengths_count) {
2263        float default_focal_length = gCamCapability[mCameraId]->focal_lengths[0];
2264        settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2265    }*/
2266
2267    mDefaultMetadata[type] = settings.release();
2268
2269    pthread_mutex_unlock(&mMutex);
2270    return mDefaultMetadata[type];
2271}
2272
2273/*===========================================================================
2274 * FUNCTION   : setFrameParameters
2275 *
2276 * DESCRIPTION: set parameters per frame as requested in the metadata from
2277 *              framework
2278 *
2279 * PARAMETERS :
2280 *   @frame_id  : frame number of the request
2281 *   @settings  : frame settings information from framework
2282 *
2283 * RETURN     : success: NO_ERROR
2284 *              failure: BAD_VALUE
2285 *==========================================================================*/
2286int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2287                                                  const camera_metadata_t *settings)
2288{
2289    /*translate from camera_metadata_t type to parm_type_t*/
2290    int rc = 0;
2291    if (settings == NULL && mFirstRequest) {
2292        /*settings cannot be null for the first request*/
2293        return BAD_VALUE;
2294    }
2295
2296    int32_t hal_version = CAM_HAL_V3;
2297
2298    memset(mParameters, 0, sizeof(parm_buffer_t));
2299    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2300    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2301                sizeof(hal_version), &hal_version);
2302
2303    /*we need to update the frame number in the parameters*/
2304    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2305                                sizeof(frame_id), &frame_id);
2306    if (rc < 0) {
2307        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2308        return BAD_VALUE;
2309    }
2310
2311    if(settings != NULL){
2312        rc = translateMetadataToParameters(settings);
2313    }
2314    /*set the parameters to backend*/
2315    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2316    return rc;
2317}
2318
2319/*===========================================================================
2320 * FUNCTION   : translateMetadataToParameters
2321 *
2322 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2323 *
2324 *
2325 * PARAMETERS :
2326 *   @settings  : frame settings information from framework
2327 *
2328 *
2329 * RETURN     : success: NO_ERROR
2330 *              failure: non-zero error code
2331 *==========================================================================*/
2332int QCamera3HardwareInterface::translateMetadataToParameters
2333                                  (const camera_metadata_t *settings)
2334{
2335    int rc = 0;
2336    CameraMetadata frame_settings;
2337    frame_settings = settings;
2338
2339
2340    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2341        int32_t antibandingMode =
2342            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2343        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2344                sizeof(antibandingMode), &antibandingMode);
2345    }
2346
2347    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2348        int32_t expCompensation = frame_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2349        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2350          sizeof(expCompensation), &expCompensation);
2351    }
2352
2353    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2354        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2355        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2356                sizeof(aeLock), &aeLock);
2357    }
2358
2359    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2360        cam_fps_range_t fps_range;
2361        fps_range.min_fps =
2362            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2363        fps_range.max_fps =
2364            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2365        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2366                sizeof(fps_range), &fps_range);
2367    }
2368
2369    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2370        uint8_t fwk_focusMode =
2371            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2372        uint8_t focusMode = lookupHalName(FOCUS_MODES_MAP,
2373                                          sizeof(FOCUS_MODES_MAP),
2374                                          fwk_focusMode);
2375        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2376                sizeof(focusMode), &focusMode);
2377    }
2378
2379    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2380        uint8_t awbLock =
2381            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2382        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2383                sizeof(awbLock), &awbLock);
2384    }
2385
2386    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2387        uint8_t fwk_whiteLevel =
2388            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2389        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2390                sizeof(WHITE_BALANCE_MODES_MAP),
2391                fwk_whiteLevel);
2392        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2393                sizeof(whiteLevel), &whiteLevel);
2394    }
2395
2396    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2397        uint8_t fwk_effectMode =
2398            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2399        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2400                sizeof(EFFECT_MODES_MAP),
2401                fwk_effectMode);
2402        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2403                sizeof(effectMode), &effectMode);
2404    }
2405
2406    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2407        uint8_t fwk_aeMode =
2408            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2409        uint8_t aeMode;
2410        int32_t redeye;
2411        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2412            aeMode = CAM_AE_MODE_OFF;
2413        } else {
2414            aeMode = CAM_AE_MODE_ON;
2415        }
2416        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2417            redeye = 1;
2418        } else {
2419            redeye = 0;
2420        }
2421        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2422                                          sizeof(AE_FLASH_MODE_MAP),
2423                                          aeMode);
2424        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2425                sizeof(aeMode), &aeMode);
2426        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2427                sizeof(flashMode), &flashMode);
2428        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2429                sizeof(redeye), &redeye);
2430    }
2431
2432    if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) {
2433        int32_t metaFrameNumber =
2434            frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0];
2435        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2436                sizeof(metaFrameNumber), &metaFrameNumber);
2437    }
2438
2439    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2440        uint8_t colorCorrectMode =
2441            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2442        rc =
2443            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2444                    sizeof(colorCorrectMode), &colorCorrectMode);
2445    }
2446    cam_trigger_t aecTrigger;
2447    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
2448    aecTrigger.trigger_id = -1;
2449    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2450        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2451        aecTrigger.trigger =
2452            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2453        aecTrigger.trigger_id =
2454            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2455    }
2456    rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2457                                sizeof(aecTrigger), &aecTrigger);
2458
2459    /*af_trigger must come with a trigger id*/
2460    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2461        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2462        cam_trigger_t af_trigger;
2463        af_trigger.trigger =
2464            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2465        af_trigger.trigger_id =
2466            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2467        rc = AddSetParmEntryToBatch(mParameters,
2468                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2469    }
2470
2471    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2472        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2473        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2474                sizeof(metaMode), &metaMode);
2475    }
2476
2477    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2478        int32_t demosaic =
2479            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2480        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2481                sizeof(demosaic), &demosaic);
2482    }
2483
2484    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2485        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2486        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE,
2487                sizeof(edgeMode), &edgeMode);
2488    }
2489
2490    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2491        int32_t edgeStrength =
2492            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2493        rc = AddSetParmEntryToBatch(mParameters,
2494                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2495    }
2496
2497    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2498        uint8_t flashMode =
2499            frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2500        rc = AddSetParmEntryToBatch(mParameters,
2501                CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode);
2502    }
2503
2504    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2505        uint8_t flashPower =
2506            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2507        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2508                sizeof(flashPower), &flashPower);
2509    }
2510
2511    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2512        int64_t flashFiringTime =
2513            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2514        rc = AddSetParmEntryToBatch(mParameters,
2515                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2516    }
2517
2518    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
2519        uint8_t geometricMode =
2520            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
2521        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
2522                sizeof(geometricMode), &geometricMode);
2523    }
2524
2525    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
2526        uint8_t geometricStrength =
2527            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
2528        rc = AddSetParmEntryToBatch(mParameters,
2529                CAM_INTF_META_GEOMETRIC_STRENGTH,
2530                sizeof(geometricStrength), &geometricStrength);
2531    }
2532
2533    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2534        uint8_t hotPixelMode =
2535            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2536        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2537                sizeof(hotPixelMode), &hotPixelMode);
2538    }
2539
2540    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2541        float lensAperture =
2542            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2543        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2544                sizeof(lensAperture), &lensAperture);
2545    }
2546
2547    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2548        float filterDensity =
2549            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2550        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2551                sizeof(filterDensity), &filterDensity);
2552    }
2553
2554    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2555        float focalLength =
2556            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2557        rc = AddSetParmEntryToBatch(mParameters,
2558                CAM_INTF_META_LENS_FOCAL_LENGTH,
2559                sizeof(focalLength), &focalLength);
2560    }
2561
2562    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2563        float focalDistance =
2564            frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2565        rc = AddSetParmEntryToBatch(mParameters,
2566                CAM_INTF_META_LENS_FOCUS_DISTANCE,
2567                sizeof(focalDistance), &focalDistance);
2568    }
2569
2570    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2571        uint8_t optStabMode =
2572            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2573        rc = AddSetParmEntryToBatch(mParameters,
2574                CAM_INTF_META_LENS_OPT_STAB_MODE,
2575                sizeof(optStabMode), &optStabMode);
2576    }
2577
2578    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2579        uint8_t noiseRedMode =
2580            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2581        rc = AddSetParmEntryToBatch(mParameters,
2582                CAM_INTF_META_NOISE_REDUCTION_MODE,
2583                sizeof(noiseRedMode), &noiseRedMode);
2584    }
2585
2586    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2587        uint8_t noiseRedStrength =
2588            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2589        rc = AddSetParmEntryToBatch(mParameters,
2590                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2591                sizeof(noiseRedStrength), &noiseRedStrength);
2592    }
2593
2594    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2595        cam_crop_region_t scalerCropRegion;
2596        scalerCropRegion.left =
2597            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2598        scalerCropRegion.top =
2599            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2600        scalerCropRegion.width =
2601            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2602        scalerCropRegion.height =
2603            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2604        rc = AddSetParmEntryToBatch(mParameters,
2605                CAM_INTF_META_SCALER_CROP_REGION,
2606                sizeof(scalerCropRegion), &scalerCropRegion);
2607    }
2608
2609    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2610        int64_t sensorExpTime =
2611            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2612        rc = AddSetParmEntryToBatch(mParameters,
2613                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2614                sizeof(sensorExpTime), &sensorExpTime);
2615    }
2616
2617    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2618        int64_t sensorFrameDuration =
2619            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2620        rc = AddSetParmEntryToBatch(mParameters,
2621                CAM_INTF_META_SENSOR_FRAME_DURATION,
2622                sizeof(sensorFrameDuration), &sensorFrameDuration);
2623    }
2624
2625    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2626        int32_t sensorSensitivity =
2627            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2628        rc = AddSetParmEntryToBatch(mParameters,
2629                CAM_INTF_META_SENSOR_SENSITIVITY,
2630                sizeof(sensorSensitivity), &sensorSensitivity);
2631    }
2632
2633    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
2634        int32_t shadingMode =
2635            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
2636        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
2637                sizeof(shadingMode), &shadingMode);
2638    }
2639
2640    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
2641        uint8_t shadingStrength =
2642            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
2643        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
2644                sizeof(shadingStrength), &shadingStrength);
2645    }
2646
2647    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
2648        uint8_t facedetectMode =
2649            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
2650        rc = AddSetParmEntryToBatch(mParameters,
2651                CAM_INTF_META_STATS_FACEDETECT_MODE,
2652                sizeof(facedetectMode), &facedetectMode);
2653    }
2654
2655    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
2656        uint8_t histogramMode =
2657            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
2658        rc = AddSetParmEntryToBatch(mParameters,
2659                CAM_INTF_META_STATS_HISTOGRAM_MODE,
2660                sizeof(histogramMode), &histogramMode);
2661    }
2662
2663    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
2664        uint8_t sharpnessMapMode =
2665            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
2666        rc = AddSetParmEntryToBatch(mParameters,
2667                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
2668                sizeof(sharpnessMapMode), &sharpnessMapMode);
2669    }
2670
2671    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
2672        uint8_t tonemapMode =
2673            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
2674        rc = AddSetParmEntryToBatch(mParameters,
2675                CAM_INTF_META_TONEMAP_MODE,
2676                sizeof(tonemapMode), &tonemapMode);
2677    }
2678
2679    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2680        uint8_t captureIntent =
2681            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2682        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2683                sizeof(captureIntent), &captureIntent);
2684    }
2685
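    /* Metering regions: convertFromRegions() repacks the framework's
     * (xmin, ymin, xmax, ymax, weight) region tuples into the cam_area_t
     * (rect + weight) layout expected by the camera backend. */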
2686    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
2687        cam_area_t roi;
2688        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
2689        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
2690                sizeof(roi), &roi);
2691    }
2692
2693    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
2694        cam_area_t roi;
2695        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
2696        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
2697                sizeof(roi), &roi);
2698    }
2699
2700    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
2701        cam_area_t roi;
2702        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
2703        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
2704                sizeof(roi), &roi);
2705    }
2706    return rc;
2707}
2708
2709/*===========================================================================
2710 * FUNCTION   : getJpegSettings
2711 *
2712 * DESCRIPTION: save the jpeg settings in the HAL
 *
 * PARAMETERS :
 *   @settings  : frame settings information from framework
 *
2719 * RETURN     : success: NO_ERROR
 *              failure: NO_MEMORY if the settings buffer cannot be allocated
2721 *==========================================================================*/
2722int QCamera3HardwareInterface::getJpegSettings
2723                                  (const camera_metadata_t *settings)
2724{
2725    if (mJpegSettings) {
2726        free(mJpegSettings);
2727        mJpegSettings = NULL;
2728    }
    mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
    if (mJpegSettings == NULL) {
        ALOGE("%s: Failed to allocate memory for jpeg settings", __func__);
        return NO_MEMORY;
    }
    memset(mJpegSettings, 0, sizeof(jpeg_settings_t));
2730    CameraMetadata jpeg_settings;
2731    jpeg_settings = settings;
2732
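    /* Cache the per-request JPEG parameters; when the framework omits a tag,
     * fall back to a conservative default (0 degree rotation, quality 85,
     * no thumbnail). */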
2733    if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
2734        mJpegSettings->jpeg_orientation =
2735            jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
2736    } else {
2737        mJpegSettings->jpeg_orientation = 0;
2738    }
2739    if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
2740        mJpegSettings->jpeg_quality =
2741            jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
2742    } else {
2743        mJpegSettings->jpeg_quality = 85;
2744    }
2745    if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
2746        mJpegSettings->thumbnail_size.width =
2747            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
2748        mJpegSettings->thumbnail_size.height =
2749            jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
2750    } else {
2751        mJpegSettings->thumbnail_size.width = 0;
2752        mJpegSettings->thumbnail_size.height = 0;
2753    }
2754    if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
2755        for (int i = 0; i < 3; i++) {
2756            mJpegSettings->gps_coordinates[i] =
2757                jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
2758        }
2759    }
2760    if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
2761        mJpegSettings->gps_timestamp =
2762            jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
2763    }
2764
2765    if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
2766        mJpegSettings->gps_processing_method =
2767            jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[0];
2768    }
2769    if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2770        mJpegSettings->sensor_sensitivity =
2771            jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2772    }
2773    if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2774        mJpegSettings->lens_focal_length =
2775            jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2776    }
2777    if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2778        mJpegSettings->exposure_compensation =
2779            jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2780    }
2781    mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
2782    mJpegSettings->max_jpeg_size = calcMaxJpegSize();
2783    return 0;
2784}
2785
2786/*===========================================================================
2787 * FUNCTION   : captureResultCb
2788 *
2789 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
2790 *
2791 * PARAMETERS :
 *   @metadata : metadata superbuf from mm-camera-interface; NULL when only a
 *               stream buffer is being returned
 *   @buffer   : actual gralloc buffer to be returned to framework. NULL if metadata.
 *   @frame_number : frame number of the request this result belongs to
 *   @userdata : opaque pointer to the owning QCamera3HardwareInterface instance
2795 *
2796 * RETURN     : NONE
2797 *==========================================================================*/
2798void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
2799                camera3_stream_buffer_t *buffer,
2800                uint32_t frame_number, void *userdata)
2801{
2802    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
2803    if (hw == NULL) {
2804        ALOGE("%s: Invalid hw %p", __func__, hw);
2805        return;
2806    }
2807
2808    hw->captureResultCb(metadata, buffer, frame_number);
2809    return;
2810}
2811
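/* The static functions below are the camera3_device_ops entry points exposed
 * to the framework. Each wrapper recovers the QCamera3HardwareInterface
 * instance from device->priv, validates it, and forwards the call to the
 * corresponding member function. */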
2812/*===========================================================================
2813 * FUNCTION   : initialize
2814 *
2815 * DESCRIPTION: Pass framework callback pointers to HAL
2816 *
2817 * PARAMETERS :
 *   @device       : ptr to camera3_device struct
 *   @callback_ops : callback function pointers provided by the framework
 *
2820 * RETURN     : Success : 0
2821 *              Failure: -ENODEV
2822 *==========================================================================*/
2823
2824int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
2825                                  const camera3_callback_ops_t *callback_ops)
2826{
2827    ALOGV("%s: E", __func__);
2828    QCamera3HardwareInterface *hw =
2829        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2830    if (!hw) {
2831        ALOGE("%s: NULL camera device", __func__);
2832        return -ENODEV;
2833    }
2834
2835    int rc = hw->initialize(callback_ops);
2836    ALOGV("%s: X", __func__);
2837    return rc;
2838}
2839
2840/*===========================================================================
2841 * FUNCTION   : configure_streams
2842 *
 * DESCRIPTION: Configure the HAL to stream with the set of streams requested
 *              by the framework
 *
 * PARAMETERS :
 *   @device      : ptr to camera3_device struct
 *   @stream_list : set of streams to be configured
 *
2848 * RETURN     : Success: 0
2849 *              Failure: -EINVAL (if stream configuration is invalid)
2850 *                       -ENODEV (fatal error)
2851 *==========================================================================*/
2852
2853int QCamera3HardwareInterface::configure_streams(
2854        const struct camera3_device *device,
2855        camera3_stream_configuration_t *stream_list)
2856{
2857    ALOGV("%s: E", __func__);
2858    QCamera3HardwareInterface *hw =
2859        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2860    if (!hw) {
2861        ALOGE("%s: NULL camera device", __func__);
2862        return -ENODEV;
2863    }
2864    int rc = hw->configureStreams(stream_list);
2865    ALOGV("%s: X", __func__);
2866    return rc;
2867}
2868
2869/*===========================================================================
2870 * FUNCTION   : register_stream_buffers
2871 *
2872 * DESCRIPTION: Register stream buffers with the device
2873 *
2874 * PARAMETERS :
 *   @device     : ptr to camera3_device struct
 *   @buffer_set : set of buffers to be registered for a single stream
 *
 * RETURN     : 0 on success, negative error code on failure
2877 *==========================================================================*/
2878int QCamera3HardwareInterface::register_stream_buffers(
2879        const struct camera3_device *device,
2880        const camera3_stream_buffer_set_t *buffer_set)
2881{
2882    ALOGV("%s: E", __func__);
2883    QCamera3HardwareInterface *hw =
2884        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2885    if (!hw) {
2886        ALOGE("%s: NULL camera device", __func__);
2887        return -ENODEV;
2888    }
2889    int rc = hw->registerStreamBuffers(buffer_set);
2890    ALOGV("%s: X", __func__);
2891    return rc;
2892}
2893
2894/*===========================================================================
2895 * FUNCTION   : construct_default_request_settings
2896 *
2897 * DESCRIPTION: Configure a settings buffer to meet the required use case
2898 *
2899 * PARAMETERS :
 *   @device : ptr to camera3_device struct
 *   @type   : request template type, e.g. CAMERA3_TEMPLATE_PREVIEW or
 *             CAMERA3_TEMPLATE_STILL_CAPTURE
 *
2902 * RETURN     : Success: Return valid metadata
2903 *              Failure: Return NULL
2904 *==========================================================================*/
2905const camera_metadata_t* QCamera3HardwareInterface::
2906    construct_default_request_settings(const struct camera3_device *device,
2907                                        int type)
2908{
2909
2910    ALOGV("%s: E", __func__);
2911    camera_metadata_t* fwk_metadata = NULL;
2912    QCamera3HardwareInterface *hw =
2913        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2914    if (!hw) {
2915        ALOGE("%s: NULL camera device", __func__);
2916        return NULL;
2917    }
2918
2919    fwk_metadata = hw->translateCapabilityToMetadata(type);
2920
2921    ALOGV("%s: X", __func__);
2922    return fwk_metadata;
2923}
2924
2925/*===========================================================================
2926 * FUNCTION   : process_capture_request
2927 *
2928 * DESCRIPTION:
2929 *
2930 * PARAMETERS :
2931 *
2932 *
2933 * RETURN     :
2934 *==========================================================================*/
2935int QCamera3HardwareInterface::process_capture_request(
2936                    const struct camera3_device *device,
2937                    camera3_capture_request_t *request)
2938{
2939    ALOGV("%s: E", __func__);
2940    QCamera3HardwareInterface *hw =
2941        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2942    if (!hw) {
2943        ALOGE("%s: NULL camera device", __func__);
2944        return -EINVAL;
2945    }
2946
2947    int rc = hw->processCaptureRequest(request);
2948    ALOGV("%s: X", __func__);
2949    return rc;
2950}
2951
2952/*===========================================================================
2953 * FUNCTION   : get_metadata_vendor_tag_ops
2954 *
 * DESCRIPTION: Retrieve the vendor metadata tag query operations from the HAL
 *
 * PARAMETERS :
 *   @device : ptr to camera3_device struct
 *   @ops    : vendor tag query ops table to be filled in by the HAL
 *
 * RETURN     : NONE
2961 *==========================================================================*/
2962
2963void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
2964                const struct camera3_device *device,
2965                vendor_tag_query_ops_t* ops)
2966{
2967    ALOGV("%s: E", __func__);
2968    QCamera3HardwareInterface *hw =
2969        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2970    if (!hw) {
2971        ALOGE("%s: NULL camera device", __func__);
2972        return;
2973    }
2974
2975    hw->getMetadataVendorTagOps(ops);
2976    ALOGV("%s: X", __func__);
2977    return;
2978}
2979
2980/*===========================================================================
2981 * FUNCTION   : dump
2982 *
 * DESCRIPTION: Dump the camera device state to the given file descriptor
 *
 * PARAMETERS :
 *   @device : ptr to camera3_device struct
 *   @fd     : file descriptor to write the dump output to
 *
 * RETURN     : NONE
2989 *==========================================================================*/
2990
2991void QCamera3HardwareInterface::dump(
2992                const struct camera3_device *device, int fd)
2993{
2994    ALOGV("%s: E", __func__);
2995    QCamera3HardwareInterface *hw =
2996        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
2997    if (!hw) {
2998        ALOGE("%s: NULL camera device", __func__);
2999        return;
3000    }
3001
3002    hw->dump(fd);
3003    ALOGV("%s: X", __func__);
3004    return;
3005}
3006
3007/*===========================================================================
3008 * FUNCTION   : close_camera_device
3009 *
 * DESCRIPTION: Close the camera device and free the HAL instance
 *
 * PARAMETERS :
 *   @device : ptr to the hw_device_t embedded in the camera3 device
 *
 * RETURN     : NO_ERROR on success
 *              BAD_VALUE if the device handle is invalid
3016 *==========================================================================*/
3017int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3018{
3019    ALOGV("%s: E", __func__);
3020    int ret = NO_ERROR;
3021    QCamera3HardwareInterface *hw =
3022        reinterpret_cast<QCamera3HardwareInterface *>(
3023            reinterpret_cast<camera3_device_t *>(device)->priv);
3024    if (!hw) {
3025        ALOGE("NULL camera device");
3026        return BAD_VALUE;
3027    }
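    /* Deleting the HAL instance runs its destructor, which is expected to
     * tear down any remaining channels and close the camera session. */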
3028    delete hw;
3029    ALOGV("%s: X", __func__);
3030    return ret;
3031}
3032
3033}; //end namespace qcamera
3034